Step (1) Import Libraries; Step (2) Load 5 Datasets; Step (3) Extract MFCC Features; Step (4) Pre-Processing; Step (5) Split Dataset into Training and Testing; Step (6) Train CNN Classifier; Step (7) Train MLP Classifier; Step (8) Comparison of CNN and MLP
# Import Libraries
import os
import sys
import glob
import keras
import pickle
import sklearn
import librosa
import warnings
import collections
import numpy as np
import pandas as pd
import seaborn as sns
import librosa.display
import tensorflow as tf
from scipy import stats
from tqdm import notebook
import IPython.display as ipd
import scipy.io.wavfile as wav
from keras.layers import Dense
import matplotlib.pyplot as plt
from keras.layers import Conv2D
from keras.layers import Flatten
warnings.filterwarnings("ignore")
from sklearn import preprocessing
from keras.models import Sequential
from prettytable import PrettyTable
from matplotlib.pyplot import figure
tf.config.run_functions_eagerly(True)
from keras.layers import Bidirectional
from tensorflow.keras.layers import GRU
from astropy.table import Table, Column
from python_speech_features import mfcc
from keras.layers import TimeDistributed
from keras.layers.convolutional import Conv1D
from sklearn.preprocessing import MinMaxScaler
from sklearn.neural_network import MLPClassifier
from tensorflow.keras.utils import to_categorical
from imblearn.over_sampling import RandomOverSampler
from sklearn.model_selection import train_test_split
from keras.callbacks import ReduceLROnPlateau, ModelCheckpoint
from keras.layers import Dense, Conv1D, MaxPooling1D, Flatten, Dropout, BatchNormalization
from sklearn.metrics import accuracy_score, confusion_matrix, balanced_accuracy_score, classification_report
(1) SAVEE Database:
Dataset Link
(2) EmoDB:
Dataset Link
(3) CREMA-D:
Dataset Link
(4) TESS:
Dataset Link
(5) RAVDESS:
Dataset Link
# Load Sample Data
print("\nSample Data:")
print("============\n")
path = ('Datasets/SAVEE Database')
sample_data_1 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
for i in range(5):
print("Audio: ")
ipd.display(ipd.Audio(sample_data_1[i*100]))
data, sampling_rate = librosa.load(sample_data_1[i*100])
plt.figure(figsize=(10, 2),facecolor="Green")
librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data
print("\nSample Data:")
print("============\n")
# Recursively collect every .wav file under the EmoDB dataset directory.
path = 'Datasets/EmoDB'
sample_data_2 = [os.path.join(dp, f)
                 for dp, _, filenames in os.walk(path)
                 for f in filenames
                 if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview 5 spaced-out samples (every 100th file); stop early if the dataset
# holds fewer files than expected instead of raising IndexError.
for i in range(5):
    idx = i * 100
    if idx >= len(sample_data_2):
        break
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_2[idx]))
    data, sampling_rate = librosa.load(sample_data_2[idx])
    plt.figure(figsize=(10, 2), facecolor="blue")
    # librosa.display.waveplot was removed in librosa 0.10; waveshow replaces it.
    librosa.display.waveshow(data, sr=sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data
print("\nSample Data:")
print("============\n")
# Recursively collect every .wav file under the CREMA-D dataset directory.
path = 'Datasets/CREMA-D'
sample_data_3 = [os.path.join(dp, f)
                 for dp, _, filenames in os.walk(path)
                 for f in filenames
                 if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview 5 spaced-out samples (every 100th file); stop early if the dataset
# holds fewer files than expected instead of raising IndexError.
for i in range(5):
    idx = i * 100
    if idx >= len(sample_data_3):
        break
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_3[idx]))
    data, sampling_rate = librosa.load(sample_data_3[idx])
    plt.figure(figsize=(10, 2), facecolor="brown")
    # librosa.display.waveplot was removed in librosa 0.10; waveshow replaces it.
    librosa.display.waveshow(data, sr=sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data
print("\nSample Data:")
print("============\n")
# Recursively collect every .wav file under the TESS dataset directory.
path = 'Datasets/TESS'
sample_data_4 = [os.path.join(dp, f)
                 for dp, _, filenames in os.walk(path)
                 for f in filenames
                 if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview 5 spaced-out samples (every 100th file); stop early if the dataset
# holds fewer files than expected instead of raising IndexError.
for i in range(5):
    idx = i * 100
    if idx >= len(sample_data_4):
        break
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_4[idx]))
    data, sampling_rate = librosa.load(sample_data_4[idx])
    plt.figure(figsize=(10, 2), facecolor="gray")
    # librosa.display.waveplot was removed in librosa 0.10; waveshow replaces it.
    librosa.display.waveshow(data, sr=sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data
print("\nSample Data:")
print("============\n")
# Recursively collect every .wav file under the RAVDESS dataset directory.
path = 'Datasets/RAVDESS'
sample_data_5 = [os.path.join(dp, f)
                 for dp, _, filenames in os.walk(path)
                 for f in filenames
                 if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview 5 spaced-out samples (every 100th file); stop early if the dataset
# holds fewer files than expected instead of raising IndexError.
for i in range(5):
    idx = i * 100
    if idx >= len(sample_data_5):
        break
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_5[idx]))
    data, sampling_rate = librosa.load(sample_data_5[idx])
    plt.figure(figsize=(10, 2), facecolor="yellow")
    # librosa.display.waveplot was removed in librosa 0.10; waveshow replaces it.
    librosa.display.waveshow(data, sr=sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# SAVEE: extract MFCC features per file, attach emotion and gender labels,
# persist to CSV, and reload as features_1.
# Filename letter code -> emotion (e.g. 'sa01.wav' -> 'sa' -> Sadness).
savee_emotions = {
    'a': "Anger",
    'd': "Disgust",
    'f': "Fear",
    'h': "Happiness",
    'n': "Neutral",
    'sa': "Sadness",
    'su': "Surprise",
}
# Accumulate per-file frames in a list: DataFrame.append was removed in
# pandas 2.0, and repeated appends were quadratic anyway.
frames_1 = []
for i in notebook.tqdm(range(len(sample_data_1))):
    data, sampling_rate = librosa.load(sample_data_1[i])
    # Get MFCC Features.
    # NOTE(review): winlen is in seconds, so winlen=30 means a 30 s analysis
    # window -- confirm this was intended (typical MFCC windows are ~0.025 s).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame
    mfcc_df = pd.DataFrame(mfcc_features)
    # Emotion code = basename minus the trailing '<2-digit take>.wav' (6 chars);
    # equivalent to the original hard-coded path-index slicing, but independent
    # of how deep the dataset directory sits.
    code = os.path.basename(sample_data_1[i])[:-6]
    mfcc_df["Emotion"] = savee_emotions.get(code, "Other")
    mfcc_df["Gender"] = "Male"  # all SAVEE speakers are male
    frames_1.append(mfcc_df)
sample_data_mfcc_features_1 = pd.concat(frames_1)
# Save to CSV
sample_data_mfcc_features_1.to_csv('Audio Features/sample_data_mfcc_features_1.csv', index=False, mode='w', header=True)
# Load Features
features_1 = pd.read_csv('Audio Features/sample_data_mfcc_features_1.csv')
features_1
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 3.203335 | 13.795782 | -19.908137 | 23.113571 | -41.785224 | 5.834210 | -52.924076 | -2.088703 | -14.617876 | 4.936264 | -15.795315 | 11.496223 | -4.460716 | Anger | Male |
| 1 | 3.446474 | 23.732635 | -30.598307 | 4.388828 | -28.666846 | 13.383600 | -52.968102 | 4.408833 | 2.814173 | -10.741523 | -14.331456 | 11.751246 | -8.396209 | Anger | Male |
| 2 | 3.476206 | 17.949951 | -30.643702 | 4.392290 | -35.739255 | -8.915932 | -46.324606 | 6.564549 | -7.061853 | -16.553248 | -16.938792 | 8.475676 | -20.742700 | Anger | Male |
| 3 | 3.558089 | 18.276150 | -20.346620 | -1.583971 | -30.147299 | 0.963494 | -54.361284 | -9.811369 | -10.147195 | -11.144327 | -1.306936 | 7.505639 | 0.572708 | Anger | Male |
| 4 | 3.764346 | 18.764859 | -24.920929 | 1.265585 | -34.814864 | -3.281257 | -50.514474 | -1.195924 | -6.026913 | -8.751210 | -12.911233 | 8.015905 | -6.634611 | Anger | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 475 | -2.891536 | 9.977441 | -8.494620 | 20.642597 | -43.986634 | -5.818822 | -42.747704 | 1.623718 | -5.636403 | -5.261166 | -19.011852 | 10.283520 | -0.037808 | Surprise | Male |
| 476 | -2.373482 | 13.664592 | -14.080212 | 22.486546 | -38.038503 | -3.001456 | -38.062197 | -3.495917 | -15.125975 | -11.673487 | -15.574197 | -0.432495 | -6.982044 | Surprise | Male |
| 477 | -2.244675 | 21.429362 | -11.647796 | 9.177692 | -29.520397 | -15.366208 | -43.935295 | 10.374315 | -28.431255 | -12.655181 | -9.804251 | -1.886984 | 4.831945 | Surprise | Male |
| 478 | -1.600036 | 13.877167 | -13.671807 | 13.773882 | -43.390877 | -1.317022 | -45.268181 | -2.391161 | -21.976652 | -9.785898 | -4.528661 | 14.398901 | -6.945704 | Surprise | Male |
| 479 | -1.418079 | 12.886012 | -9.240951 | 11.856341 | -56.535884 | 2.798735 | -38.455837 | 0.368887 | -21.125617 | -7.183570 | -11.136310 | 10.025685 | -2.063207 | Surprise | Male |
480 rows × 15 columns
# EmoDB: extract MFCC features per file, attach emotion and gender labels,
# persist to CSV, and reload as features_2.
# EmoDB filenames are '<speaker:2 digits><text:3><emotion letter><version>.wav'
# (e.g. '03a01Wa.wav'): emotion letter at basename index 5, speaker id in 0-1.
emodb_emotions = {
    'W': "Anger",
    'E': "Disgust",
    'A': "Fear",
    'F': "Happiness",
    'N': "Neutral",
    'T': "Sadness",
    'L': "Boredom",
}
emodb_male_speakers = {'03', '10', '11', '12', '15'}
# Accumulate per-file frames in a list: DataFrame.append was removed in
# pandas 2.0, and repeated appends were quadratic anyway.
frames_2 = []
for i in notebook.tqdm(range(len(sample_data_2))):
    data, sampling_rate = librosa.load(sample_data_2[i])
    # Get MFCC Features.
    # NOTE(review): winlen is in seconds, so winlen=30 means a 30 s analysis
    # window -- confirm this was intended (typical MFCC windows are ~0.025 s).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame
    mfcc_df = pd.DataFrame(mfcc_features)
    # Parse labels from the basename instead of hard-coded full-path indices,
    # so the logic no longer depends on the exact directory prefix length.
    base = os.path.basename(sample_data_2[i])
    mfcc_df["Emotion"] = emodb_emotions.get(base[5:6], "Other")
    mfcc_df["Gender"] = "Male" if base[:2] in emodb_male_speakers else "Female"
    frames_2.append(mfcc_df)
sample_data_mfcc_features_2 = pd.concat(frames_2)
# Save to CSV
sample_data_mfcc_features_2.to_csv('Audio Features/sample_data_mfcc_features_2.csv', index=False, mode='w', header=True)
# Load Features
features_2 = pd.read_csv('Audio Features/sample_data_mfcc_features_2.csv')
features_2
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2.581027 | 15.767412 | -47.700512 | 35.579753 | -70.270488 | 46.800920 | -58.114879 | 37.216890 | -49.564047 | 31.066088 | -17.529903 | 3.279682 | -12.838690 | Happiness | Male |
| 1 | 2.795378 | 24.843764 | -51.506450 | 49.454440 | -72.968111 | 55.298546 | -74.268759 | 41.864338 | -59.045060 | 38.514404 | 0.004737 | 10.388288 | -14.279588 | Neutral | Male |
| 2 | 4.170786 | 14.397566 | -71.067858 | 40.482368 | -84.346462 | 58.019575 | -81.388417 | 53.770739 | -75.670880 | 40.587622 | -35.743369 | 21.891265 | -25.008990 | Anger | Male |
| 3 | 3.337494 | 16.282372 | -50.880033 | 31.990281 | -85.102191 | 50.261715 | -84.640713 | 46.940764 | -82.094347 | 32.629892 | -49.312384 | 7.776564 | -22.946343 | Happiness | Male |
| 4 | 2.707667 | 24.334550 | -45.717779 | 53.691805 | -71.204936 | 54.309667 | -80.084345 | 44.830838 | -61.581425 | 38.328134 | -23.163505 | 17.898031 | -5.970665 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 530 | 3.863628 | 19.842292 | -52.308186 | 48.280323 | -72.358978 | 54.636782 | -87.941838 | 44.757594 | -59.136477 | 49.136846 | -35.821170 | 15.900585 | -32.775497 | Boredom | Female |
| 531 | 4.198406 | 4.509584 | -34.055637 | 48.250139 | -48.483522 | 48.624353 | -72.094906 | 19.418505 | -74.377215 | 30.325926 | -38.960475 | -1.243505 | -27.763525 | Sadness | Female |
| 532 | 3.450846 | 1.809250 | -26.119918 | 58.439039 | -41.793259 | 43.821996 | -58.959182 | 33.575929 | -56.189126 | 32.773332 | -32.477892 | 8.302056 | -14.578023 | Sadness | Female |
| 533 | 3.457763 | 7.503398 | -88.067662 | 40.491302 | -88.680502 | 46.283885 | -81.724622 | 27.269826 | -67.302466 | 10.171340 | -50.524035 | 11.235342 | -32.179386 | Anger | Female |
| 534 | 4.445711 | 8.212223 | -81.494063 | 45.600778 | -89.101499 | 32.327346 | -92.760920 | 36.413223 | -79.412132 | 31.038278 | -60.340300 | 26.689882 | -26.373804 | Anger | Female |
535 rows × 15 columns
# CREMA-D: extract MFCC features per file, attach emotion and gender labels,
# persist to CSV, and reload as features_3.
# CREMA-D filenames are '<actor>_<sentence>_<emotion>_<level>.wav'
# (e.g. '1001_DFA_ANG_XX.wav').
crema_emotions = {
    'ANG': "Anger",
    'DIS': "Disgust",
    'FEA': "Fear",
    'HAP': "Happiness",
    'NEU': "Neutral",
    'SAD': "Sadness",
}
# Actor ids of the female CREMA-D speakers; a set gives O(1) membership tests.
female = {1002, 1003, 1004, 1006, 1007, 1008, 1009, 1010, 1012, 1013, 1018,
          1020, 1021, 1024, 1025, 1028, 1029, 1030, 1037, 1043, 1046, 1047,
          1049, 1052, 1053, 1054, 1055, 1056, 1058, 1060, 1061, 1063, 1072,
          1073, 1074, 1075, 1076, 1078, 1079, 1082, 1084, 1089, 1091}
# Accumulate per-file frames in a list: DataFrame.append was removed in
# pandas 2.0, and repeated appends were quadratic anyway.
frames_3 = []
for i in notebook.tqdm(range(len(sample_data_3))):
    data, sampling_rate = librosa.load(sample_data_3[i])
    # Get MFCC Features.
    # NOTE(review): winlen is in seconds, so winlen=30 means a 30 s analysis
    # window -- confirm this was intended (typical MFCC windows are ~0.025 s).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame
    mfcc_df = pd.DataFrame(mfcc_features)
    # Parse labels from the basename instead of hard-coded full-path indices.
    parts = os.path.basename(sample_data_3[i]).split('_')
    mfcc_df["Emotion"] = crema_emotions.get(parts[2], "Other")
    mfcc_df["Gender"] = "Female" if int(parts[0]) in female else "Male"
    frames_3.append(mfcc_df)
sample_data_mfcc_features_3 = pd.concat(frames_3)
# Save to CSV
sample_data_mfcc_features_3.to_csv('Audio Features/sample_data_mfcc_features_3.csv', index=False, mode='w', header=True)
# Load Features
features_3 = pd.read_csv('Audio Features/sample_data_mfcc_features_3.csv')
features_3
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2.368902 | 23.902528 | -65.345819 | 44.065375 | -74.662445 | 51.996950 | -88.516855 | 53.855236 | -78.723548 | 69.285800 | -59.639412 | 39.177042 | -23.609966 | Anger | Male |
| 1 | 0.307114 | 13.163935 | -48.153023 | 33.389668 | -49.739224 | 34.304775 | -69.983706 | 35.961724 | -49.892039 | 39.105544 | -31.849578 | 27.108298 | 2.069042 | Disgust | Male |
| 2 | 2.603431 | 10.167095 | -47.088421 | 13.140881 | -51.376008 | 24.318749 | -71.462424 | 20.749077 | -50.124086 | 37.938860 | -31.598722 | 8.348972 | -5.282255 | Fear | Male |
| 3 | 2.140128 | 11.848147 | -47.720734 | 23.389854 | -52.047988 | 23.094227 | -65.452202 | 24.289967 | -50.372034 | 35.510180 | -35.682490 | 11.530250 | 8.193065 | Happiness | Male |
| 4 | 0.473232 | 12.135419 | -43.704434 | 31.896648 | -43.032738 | 21.278558 | -59.340704 | 9.792648 | -45.896816 | 39.344912 | -31.064415 | 19.842668 | 10.652224 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 7437 | -0.569248 | 7.090084 | -21.821569 | 21.218883 | -70.987375 | 58.362293 | -53.937357 | 59.089649 | -57.824928 | 26.901631 | -25.225629 | 5.655281 | -13.951756 | Disgust | Female |
| 7438 | 0.275142 | -7.106827 | -12.841243 | 33.026974 | -53.708565 | 49.734721 | -53.718731 | 28.586449 | -38.315850 | 26.497463 | -24.172343 | -5.992458 | -15.062148 | Fear | Female |
| 7439 | 0.081269 | 6.700789 | -24.036068 | 18.961563 | -59.892895 | 46.793140 | -65.963296 | 37.240255 | -53.570069 | 28.635201 | -29.254545 | -2.764532 | -8.967688 | Happiness | Female |
| 7440 | -0.233033 | 8.786678 | -29.184771 | 14.561491 | -73.676943 | 47.898016 | -35.411657 | 53.926727 | -62.481850 | 23.092826 | -16.729014 | 1.414838 | -16.098669 | Neutral | Female |
| 7441 | -0.907600 | 5.432492 | -20.481048 | 23.180579 | -67.214288 | 55.307229 | -53.056532 | 48.972602 | -48.267778 | 34.530610 | -17.210164 | -2.257076 | -11.995452 | Sadness | Female |
7442 rows × 15 columns
# TESS: extract MFCC features per file, attach emotion and gender labels,
# persist to CSV, and reload as features_4.
# TESS filenames are '<speaker>_<word>_<emotion>.wav' (e.g. 'OAF_back_angry.wav').
tess_emotions = {
    'angry': "Anger",
    'disgust': "Disgust",
    'fear': "Fear",
    'happy': "Happiness",
    'neutral': "Neutral",
    'ps': "Surprise",  # 'ps' = pleasant surprise
    'sad': "Sadness",
}
# Accumulate per-file frames in a list: DataFrame.append was removed in
# pandas 2.0, and repeated appends were quadratic anyway.
frames_4 = []
for i in notebook.tqdm(range(len(sample_data_4))):
    data, sampling_rate = librosa.load(sample_data_4[i])
    # Get MFCC Features.
    # NOTE(review): winlen is in seconds, so winlen=30 means a 30 s analysis
    # window -- confirm this was intended (typical MFCC windows are ~0.025 s).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame
    mfcc_df = pd.DataFrame(mfcc_features)
    # Parse labels from the basename instead of hard-coded full-path indices.
    parts = os.path.basename(sample_data_4[i]).split('_')
    mfcc_df["Emotion"] = tess_emotions.get(parts[-1][:-4], "Other")
    # NOTE(review): this reproduces the original labelling, but TESS 'OAF'
    # (older adult female) and 'YAF' are both female actresses -- mapping
    # 'OAF' to Male is very likely a labelling bug; confirm before relying
    # on the Gender column for this dataset.
    mfcc_df["Gender"] = "Male" if parts[0] == 'OAF' else "Female"
    frames_4.append(mfcc_df)
sample_data_mfcc_features_4 = pd.concat(frames_4)
# Save to CSV
sample_data_mfcc_features_4.to_csv('Audio Features/sample_data_mfcc_features_4.csv', index=False, mode='w', header=True)
# Load Features
features_4 = pd.read_csv('Audio Features/sample_data_mfcc_features_4.csv')
features_4
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.220228 | -6.288021 | -23.058824 | -42.426226 | -32.405139 | -2.121633 | -32.409469 | -19.628282 | -12.538443 | -8.152727 | 8.038784 | 9.805581 | -8.588909 | Anger | Male |
| 1 | -0.977203 | -4.524827 | -8.214732 | -29.952360 | -8.692126 | -8.750833 | -22.322727 | -14.788581 | -4.259685 | -10.710336 | -10.794590 | -6.866066 | -7.855454 | Disgust | Male |
| 2 | 0.054579 | -3.640480 | -8.949627 | -38.282103 | -26.989143 | -16.127692 | -31.411542 | -17.400726 | -23.480958 | -4.422475 | -20.796203 | -9.274272 | -12.435063 | Fear | Male |
| 3 | -0.666131 | -4.405894 | -1.385126 | -29.719358 | -15.141410 | 0.250221 | -31.969870 | -16.251581 | -9.591181 | -11.897906 | -3.236711 | -10.320902 | -0.489421 | Happiness | Male |
| 4 | -2.182739 | -4.278151 | -1.558268 | -21.337291 | -21.234993 | -10.585688 | -17.260342 | -23.917076 | -15.028336 | -1.602557 | -13.839702 | -1.707235 | -11.335032 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 2795 | 1.889912 | -9.972459 | -19.383733 | -16.514340 | 3.237600 | -40.164680 | -21.195187 | -36.141423 | -33.244664 | -11.505766 | -13.660636 | 4.744601 | -3.159517 | Fear | Female |
| 2796 | 3.691365 | -16.734092 | -30.044690 | -8.298291 | -18.335224 | -40.469660 | -37.680667 | -54.758465 | 0.051854 | -16.442842 | -20.674230 | 6.932490 | -33.305323 | Happiness | Female |
| 2797 | 0.734736 | -11.863582 | -6.351481 | -8.360497 | -4.854154 | -32.655983 | -13.512118 | -33.856513 | -16.572519 | -2.119042 | -19.188326 | -4.721083 | -16.395590 | Neutral | Female |
| 2798 | 0.830298 | -14.009006 | -14.546528 | -22.484116 | -20.190596 | -34.711502 | -20.611750 | -41.547508 | -24.357789 | -9.737252 | -4.848874 | -0.929159 | -17.349877 | Surprise | Female |
| 2799 | 1.380444 | -14.983023 | 12.742822 | -0.159556 | -5.243630 | -32.397391 | -11.955386 | -27.468668 | -13.824717 | -9.303697 | -8.919692 | 1.806700 | -19.386223 | Sadness | Female |
2800 rows × 15 columns
# RAVDESS: extract MFCC features per file, attach emotion and gender labels,
# persist to CSV, and reload as features_5.
# RAVDESS filenames are seven '-'-separated numeric fields:
# <modality>-<channel>-<emotion>-<intensity>-<statement>-<repetition>-<actor>.wav
ravdess_emotions = {
    '01': "Neutral",
    '03': "Happiness",
    '04': "Sadness",
    '05': "Anger",
    '06': "Fear",
    '07': "Disgust",
    '08': "Surprise",
    # '02' (Calm) intentionally falls through to "Other", as in the original.
}
# Accumulate per-file frames in a list: DataFrame.append was removed in
# pandas 2.0, and repeated appends were quadratic anyway.
frames_5 = []
for i in notebook.tqdm(range(len(sample_data_5))):
    data, sampling_rate = librosa.load(sample_data_5[i])
    # Get MFCC Features.
    # NOTE(review): winlen is in seconds, so winlen=30 means a 30 s analysis
    # window -- confirm this was intended (typical MFCC windows are ~0.025 s).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame
    mfcc_df = pd.DataFrame(mfcc_features)
    # Parse labels from the basename instead of hard-coded full-path indices.
    fields = os.path.basename(sample_data_5[i]).split('-')
    mfcc_df["Emotion"] = ravdess_emotions.get(fields[2], "Other")
    # RAVDESS convention: odd actor numbers are male, even are female.
    mfcc_df["Gender"] = "Male" if int(fields[6][:-4]) % 2 == 1 else "Female"
    frames_5.append(mfcc_df)
sample_data_mfcc_features_5 = pd.concat(frames_5)
# Save to CSV
sample_data_mfcc_features_5.to_csv('Audio Features/sample_data_mfcc_features_5.csv', index=False, mode='w', header=True)
# Load Features
features_5 = pd.read_csv('Audio Features/sample_data_mfcc_features_5.csv')
features_5
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -3.359473 | -2.146878 | -11.564124 | 1.891171 | -9.008204 | -10.801787 | -10.833871 | -15.028368 | -8.398560 | -9.163281 | 10.721436 | 4.608740 | 1.473425 | Neutral | Male |
| 1 | -3.329815 | -2.676908 | -7.403491 | 0.345950 | -9.717465 | -12.311797 | -5.109952 | -14.550771 | -11.636229 | -8.670857 | 10.890765 | 3.436975 | 2.016244 | Neutral | Male |
| 2 | -2.444367 | -7.687208 | -3.431780 | 0.146838 | -24.630812 | -7.976574 | -3.713535 | -15.636945 | -3.625836 | -14.900788 | 8.883719 | 8.981573 | 6.187388 | Neutral | Male |
| 3 | -2.511114 | -8.663157 | -2.275137 | -2.383363 | -22.228988 | -12.477076 | 0.356150 | -11.706877 | -1.475497 | -11.278868 | 8.459507 | 14.609624 | 1.657197 | Neutral | Male |
| 4 | -3.891415 | -5.680255 | -3.246147 | -0.504579 | -6.481897 | -0.268197 | -19.338007 | -15.519278 | -4.290236 | -7.981147 | 7.386760 | 1.352189 | 0.599136 | Other | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1435 | -0.020836 | -16.578694 | -20.212497 | -35.196251 | -41.204122 | -11.407926 | -28.910504 | -26.953338 | -28.814963 | -18.164941 | -22.130170 | -20.616013 | -21.943507 | Surprise | Female |
| 1436 | 0.895214 | -14.870234 | -24.218205 | -33.666994 | -30.253341 | -31.812652 | -12.578500 | -27.321653 | -18.336286 | -10.580679 | -31.383302 | -8.684033 | -16.206074 | Surprise | Female |
| 1437 | 0.696340 | -15.306912 | -23.957822 | -27.642590 | -27.104280 | -34.365583 | -12.110144 | -30.969206 | -18.769509 | -5.478475 | -22.061271 | -1.286473 | -12.172666 | Surprise | Female |
| 1438 | 2.295254 | -27.182203 | -12.448132 | -28.649016 | -35.397557 | -14.519178 | -28.478788 | -27.172286 | -24.487222 | -20.706953 | -13.494822 | -9.909802 | -19.705105 | Surprise | Female |
| 1439 | 1.574269 | -20.212374 | -5.617051 | -38.268733 | -36.260999 | -8.539351 | -21.161402 | -15.748326 | -30.735166 | -22.535320 | -13.359412 | -3.140330 | -1.425395 | Surprise | Female |
1440 rows × 15 columns
# Concatenate the five per-dataset MFCC feature tables into a single table
# with a fresh 0..n-1 index, then persist it to CSV.
frames = [features_1, features_2, features_3, features_4, features_5]
combined = pd.concat(frames, ignore_index=True)
# Save to CSV
combined.to_csv('Audio Features/combined_features.csv', index=False, mode='w', header=True)
combined
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 3.203335 | 13.795782 | -19.908137 | 23.113571 | -41.785224 | 5.834210 | -52.924076 | -2.088703 | -14.617876 | 4.936264 | -15.795315 | 11.496223 | -4.460716 | Anger | Male |
| 1 | 3.446474 | 23.732635 | -30.598307 | 4.388828 | -28.666846 | 13.383600 | -52.968102 | 4.408833 | 2.814173 | -10.741523 | -14.331456 | 11.751246 | -8.396209 | Anger | Male |
| 2 | 3.476206 | 17.949951 | -30.643702 | 4.392290 | -35.739255 | -8.915932 | -46.324606 | 6.564549 | -7.061853 | -16.553248 | -16.938792 | 8.475676 | -20.742700 | Anger | Male |
| 3 | 3.558089 | 18.276150 | -20.346620 | -1.583971 | -30.147299 | 0.963494 | -54.361284 | -9.811369 | -10.147195 | -11.144327 | -1.306936 | 7.505639 | 0.572708 | Anger | Male |
| 4 | 3.764346 | 18.764859 | -24.920929 | 1.265585 | -34.814864 | -3.281257 | -50.514474 | -1.195924 | -6.026913 | -8.751210 | -12.911233 | 8.015905 | -6.634611 | Anger | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 12692 | -0.020836 | -16.578694 | -20.212497 | -35.196251 | -41.204122 | -11.407926 | -28.910504 | -26.953338 | -28.814963 | -18.164941 | -22.130170 | -20.616013 | -21.943507 | Surprise | Female |
| 12693 | 0.895214 | -14.870234 | -24.218205 | -33.666994 | -30.253341 | -31.812652 | -12.578500 | -27.321653 | -18.336286 | -10.580679 | -31.383302 | -8.684033 | -16.206074 | Surprise | Female |
| 12694 | 0.696340 | -15.306912 | -23.957822 | -27.642590 | -27.104280 | -34.365583 | -12.110144 | -30.969206 | -18.769509 | -5.478475 | -22.061271 | -1.286473 | -12.172666 | Surprise | Female |
| 12695 | 2.295254 | -27.182203 | -12.448132 | -28.649016 | -35.397557 | -14.519178 | -28.478788 | -27.172286 | -24.487222 | -20.706953 | -13.494822 | -9.909802 | -19.705105 | Surprise | Female |
| 12696 | 1.574269 | -20.212374 | -5.617051 | -38.268733 | -36.260999 | -8.539351 | -21.161402 | -15.748326 | -30.735166 | -22.535320 | -13.359412 | -3.140330 | -1.425395 | Surprise | Female |
12697 rows × 15 columns
# Plotting: class-count bar charts for the Emotion and Gender columns.
plt.style.use('fivethirtyeight')
combined = pd.read_csv('Audio Features/combined_features.csv')
# Drop the two emotion classes that are not shared across all five datasets.
df_filtered = combined[combined['Emotion'] != "Boredom"]
combined = df_filtered[df_filtered['Emotion'] != "Other"]
figure(figsize=(14, 6), dpi=80)
plt.title('Count of Emotions', size=25)
# Pass the column via x=: positional Series input to countplot was deprecated
# in seaborn 0.12 and later removed.
sns.countplot(x=combined['Emotion'])
plt.ylabel('Count', size=14)
plt.xlabel('Emotions', size=14)
sns.despine(top=True, right=True, left=False, bottom=False)
plt.show()
# Plotting
figure(figsize=(7, 6), dpi=80)
plt.title('Count of Gender', size=25)
sns.countplot(x=combined['Gender'])
plt.ylabel('Count', size=14)
plt.xlabel('Genders', size=14)
sns.despine(top=True, right=True, left=False, bottom=False)
plt.show()
# Random Over Sampling: balance the emotion classes by duplicating
# minority-class rows (Gender stays inside X and is resampled alongside).
X = combined.drop(['Emotion'], axis=1)
y = combined['Emotion']
ros = RandomOverSampler(random_state=64)
X_resampled, y_resampled = ros.fit_resample(X, y)
# Frequency of each class before and after Random Over Sampling
counter_1 = collections.Counter(y)
counter_2 = collections.Counter(y_resampled)
# One wide figure with both panels side by side. (The original called
# plt.figure() a second time before plt.subplot(1, 2, 2), which left each
# panel stranded in its own half-empty figure.)
plt.figure()
fig = plt.gcf()
fig.set_size_inches(24, 5)
# Before Sampling
plt.subplot(1, 2, 1)
plt.bar(counter_1.keys(), counter_1.values())
plt.xticks([0, 1, 2, 3, 4, 5, 6])
plt.title("Before Sampling")
# After Sampling
plt.subplot(1, 2, 2)
plt.bar(counter_2.keys(), counter_2.values(), color='green')
plt.xticks([0, 1, 2, 3, 4, 5, 6])
plt.title("After Sampling")
plt.subplots_adjust(top=0.92, bottom=0.08, left=0.10, right=0.95, hspace=0.5, wspace=0.35)
plt.show()
# Scaling
# Separate the two targets (emotion, gender) from the features, then
# min-max scale each of the 13 MFCC columns into [0, 1].
y1 = y_resampled
y2 = X_resampled["Gender"]
X = X_resampled.drop(["Gender"],axis=1)
scaler = MinMaxScaler()
X_scaled = scaler.fit_transform(X)
# Sanity check: both label vectors and the scaled matrix share the sample count.
print(y1.shape)
print(y2.shape)
print(X_scaled.shape)
print(X_scaled)
(14350,) (14350,) (14350, 13) [[0.93610433 0.65309955 0.62192149 ... 0.60099951 0.52473204 0.56745201] [0.94190358 0.7893224 0.53968236 ... 0.61542485 0.52713997 0.51623315] [0.94261276 0.71004844 0.53933314 ... 0.58973133 0.49621191 0.35554854] ... [0.90833302 0.15584516 0.68150021 ... 0.7242391 0.34603792 0.46732237] [0.89425745 0.28430183 0.62634095 ... 0.56568075 0.33727729 0.38896452] [0.83385165 0.40645843 0.78249741 ... 0.69382561 0.39112332 0.49386858]]
# Integer-encode both targets, then zip them into one array so a single
# train_test_split keeps emotion and gender aligned per sample.
# Emotions
encoder_1 = preprocessing.LabelEncoder()
y1_encoded = encoder_1.fit_transform(y1)
# Gender
encoder_2 = preprocessing.LabelEncoder()
y2_encoded = encoder_2.fit_transform(y2)
# labels[k] == [[emotion_k], [gender_k]]; overall shape (n_samples, 2, 1).
labels = np.array([[[emo], [gen]] for emo, gen in zip(y1_encoded, y2_encoded)])
# 80/20 train/test split; 'labels' is (n, 2, 1) with [:, 0] = emotion and
# [:, 1] = gender (see the encoding cell above).
X_train, X_test, Y_train, Y_test = train_test_split(X_scaled, labels, test_size=0.2, random_state=64)
# Reorder so index 0 is gender and index 1 is emotion. NOTE: despite the
# names, y1_* below therefore holds GENDER and y2_* holds EMOTION.
Y_train = [Y_train[:, 1], Y_train[:, 0]]
Y_test = [Y_test[:, 1], Y_test[:, 0]]
# (Removed the no-op 'X_train = X_train' / 'X_test = X_test' assignments.)
y1_train = Y_train[0]  # gender, shape (n, 1)
y1_test = Y_test[0]
y2_train = Y_train[1]  # emotion, shape (n, 1)
y2_test = Y_test[1]
print(X_train.shape)
print(X_test.shape)
print(y1_train.shape)
print(y1_test.shape)
print(y2_train.shape)
print(y2_test.shape)
(11480, 13) (2870, 13) (11480, 1) (2870, 1) (11480, 1) (2870, 1)
# Stack the two (n, 1) train-label columns into one (n, 2) integer array:
# column 0 = y1_train (gender), column 1 = y2_train (emotion).
y_train = np.hstack([y1_train, y2_train])
print(y_train)
[[0 4] [1 4] [0 4] ... [1 4] [1 3] [1 1]]
# Stack the two (n, 1) test-label columns into one (n, 2) integer array:
# column 0 = y1_test (gender), column 1 = y2_test (emotion).
y_test = np.hstack([y1_test, y2_test])
print(y_test)
[[0 1] [0 4] [1 6] ... [1 6] [1 6] [1 6]]
# Gender-model inputs: take the y1_* split as the single target, reshape the
# 13-dim MFCC vectors to (samples, 13, 1) for the Conv1D stack, and one-hot
# encode the target for the softmax output.
x_train, x_test, y_train, y_test = X_train, X_test, y1_train, y1_test
size = 13  # MFCC coefficients per sample
x_train = np.array(x_train).reshape(-1, size, 1)
x_test = np.array(x_test).reshape(-1, size, 1)
y_train = to_categorical(np.array(y_train))
y_test = to_categorical(np.array(y_test))
# Create Model: 1-D CNN over the 13 MFCCs -- four Conv/MaxPool stages with
# dropout, then a small dense head ending in a 2-way softmax.
model_1_g = Sequential([
    Conv1D(256, kernel_size=5, strides=1, padding='same', activation='relu',
           input_shape=(x_train.shape[1], 1)),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(256, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(128, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Dropout(0.2),
    Conv1D(64, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Flatten(),
    Dense(units=32, activation='relu'),
    Dropout(0.3),
    Dense(units=2, activation='softmax'),
])
model_1_g.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model_1_g.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv1d (Conv1D) (None, 13, 256) 1536 _________________________________________________________________ max_pooling1d (MaxPooling1D) (None, 7, 256) 0 _________________________________________________________________ conv1d_1 (Conv1D) (None, 7, 256) 327936 _________________________________________________________________ max_pooling1d_1 (MaxPooling1 (None, 4, 256) 0 _________________________________________________________________ conv1d_2 (Conv1D) (None, 4, 128) 163968 _________________________________________________________________ max_pooling1d_2 (MaxPooling1 (None, 2, 128) 0 _________________________________________________________________ dropout (Dropout) (None, 2, 128) 0 _________________________________________________________________ conv1d_3 (Conv1D) (None, 2, 64) 41024 _________________________________________________________________ max_pooling1d_3 (MaxPooling1 (None, 1, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 64) 0 _________________________________________________________________ dense (Dense) (None, 32) 2080 _________________________________________________________________ dropout_1 (Dropout) (None, 32) 0 _________________________________________________________________ dense_1 (Dense) (None, 2) 66 ================================================================= Total params: 536,610 Trainable params: 536,610 Non-trainable params: 0 _________________________________________________________________
# Training: shrink the learning rate by a factor of 0.4 whenever the training
# loss stalls for 2 epochs, down to a floor of 1e-7.
rlrp = ReduceLROnPlateau(monitor='loss', patience=2, factor=0.4, min_lr=1e-7, verbose=0)
history_1_g = model_1_g.fit(
    x_train, y_train,
    epochs=250,
    batch_size=64,
    validation_data=(x_test, y_test),
    callbacks=[rlrp],
)
Epoch 1/250 180/180 [==============================] - 10s 56ms/step - loss: 0.5573 - accuracy: 0.6899 - val_loss: 0.4421 - val_accuracy: 0.7899 Epoch 2/250 180/180 [==============================] - 10s 55ms/step - loss: 0.4240 - accuracy: 0.7861 - val_loss: 0.4244 - val_accuracy: 0.7969 Epoch 3/250 180/180 [==============================] - 10s 55ms/step - loss: 0.4083 - accuracy: 0.7979 - val_loss: 0.4340 - val_accuracy: 0.7686 Epoch 4/250 180/180 [==============================] - 10s 56ms/step - loss: 0.3810 - accuracy: 0.8166 - val_loss: 0.3575 - val_accuracy: 0.8307 Epoch 5/250 180/180 [==============================] - 10s 55ms/step - loss: 0.3677 - accuracy: 0.8222 - val_loss: 0.3520 - val_accuracy: 0.8383 Epoch 6/250 180/180 [==============================] - 10s 56ms/step - loss: 0.3534 - accuracy: 0.8322 - val_loss: 0.3427 - val_accuracy: 0.8404 Epoch 7/250 180/180 [==============================] - 10s 56ms/step - loss: 0.3352 - accuracy: 0.8427 - val_loss: 0.3429 - val_accuracy: 0.8467 Epoch 8/250 180/180 [==============================] - 11s 59ms/step - loss: 0.3403 - accuracy: 0.8414 - val_loss: 0.3279 - val_accuracy: 0.8415 Epoch 9/250 180/180 [==============================] - 12s 67ms/step - loss: 0.3229 - accuracy: 0.8517 - val_loss: 0.3347 - val_accuracy: 0.8436 Epoch 10/250 180/180 [==============================] - 14s 76ms/step - loss: 0.2996 - accuracy: 0.8637 - val_loss: 0.3083 - val_accuracy: 0.8585 Epoch 11/250 180/180 [==============================] - 13s 70ms/step - loss: 0.3009 - accuracy: 0.8645 - val_loss: 0.2922 - val_accuracy: 0.8697 Epoch 12/250 180/180 [==============================] - 13s 71ms/step - loss: 0.2871 - accuracy: 0.8683 - val_loss: 0.2838 - val_accuracy: 0.8690 Epoch 13/250 180/180 [==============================] - 14s 77ms/step - loss: 0.2882 - accuracy: 0.8727 - val_loss: 0.3297 - val_accuracy: 0.8652 Epoch 14/250 180/180 [==============================] - 14s 78ms/step - loss: 0.2768 - accuracy: 0.8791 - 
val_loss: 0.2648 - val_accuracy: 0.8843 Epoch 15/250 180/180 [==============================] - 14s 76ms/step - loss: 0.2753 - accuracy: 0.8740 - val_loss: 0.2887 - val_accuracy: 0.8683 Epoch 16/250 180/180 [==============================] - 14s 77ms/step - loss: 0.2709 - accuracy: 0.8832 - val_loss: 0.2975 - val_accuracy: 0.8571 Epoch 17/250 180/180 [==============================] - 12s 69ms/step - loss: 0.2571 - accuracy: 0.8848 - val_loss: 0.2718 - val_accuracy: 0.8749 Epoch 18/250 180/180 [==============================] - 12s 67ms/step - loss: 0.2631 - accuracy: 0.8834 - val_loss: 0.2569 - val_accuracy: 0.8861 Epoch 19/250 180/180 [==============================] - 15s 81ms/step - loss: 0.2520 - accuracy: 0.8877 - val_loss: 0.2881 - val_accuracy: 0.8753 Epoch 20/250 180/180 [==============================] - 13s 70ms/step - loss: 0.2388 - accuracy: 0.8948 - val_loss: 0.2514 - val_accuracy: 0.8899 Epoch 21/250 180/180 [==============================] - 13s 73ms/step - loss: 0.2460 - accuracy: 0.8939 - val_loss: 0.2631 - val_accuracy: 0.8794 Epoch 22/250 180/180 [==============================] - 12s 68ms/step - loss: 0.2373 - accuracy: 0.8940 - val_loss: 0.2447 - val_accuracy: 0.8913 Epoch 23/250 180/180 [==============================] - 13s 73ms/step - loss: 0.2279 - accuracy: 0.9015 - val_loss: 0.2647 - val_accuracy: 0.8854 Epoch 24/250 180/180 [==============================] - 17s 93ms/step - loss: 0.2291 - accuracy: 0.8990 - val_loss: 0.2604 - val_accuracy: 0.8847 Epoch 25/250 180/180 [==============================] - 13s 72ms/step - loss: 0.2376 - accuracy: 0.8954 - val_loss: 0.2446 - val_accuracy: 0.8913 Epoch 26/250 180/180 [==============================] - 13s 74ms/step - loss: 0.1940 - accuracy: 0.9184 - val_loss: 0.2263 - val_accuracy: 0.9059 Epoch 27/250 180/180 [==============================] - 13s 72ms/step - loss: 0.1835 - accuracy: 0.9247 - val_loss: 0.2152 - val_accuracy: 0.9136 Epoch 28/250 180/180 [==============================] - 13s 
71ms/step - loss: 0.1807 - accuracy: 0.9251 - val_loss: 0.2156 - val_accuracy: 0.9122 Epoch 29/250 180/180 [==============================] - 12s 69ms/step - loss: 0.1719 - accuracy: 0.9289 - val_loss: 0.3041 - val_accuracy: 0.8826 Epoch 30/250 180/180 [==============================] - 13s 71ms/step - loss: 0.1729 - accuracy: 0.9270 - val_loss: 0.2155 - val_accuracy: 0.9118 Epoch 31/250 180/180 [==============================] - 13s 72ms/step - loss: 0.1695 - accuracy: 0.9298 - val_loss: 0.2265 - val_accuracy: 0.9049 Epoch 32/250 180/180 [==============================] - 12s 69ms/step - loss: 0.1619 - accuracy: 0.9308 - val_loss: 0.2337 - val_accuracy: 0.9098 Epoch 33/250 180/180 [==============================] - 13s 72ms/step - loss: 0.1694 - accuracy: 0.9285 - val_loss: 0.2032 - val_accuracy: 0.9192 Epoch 34/250 180/180 [==============================] - 14s 76ms/step - loss: 0.1587 - accuracy: 0.9341 - val_loss: 0.2101 - val_accuracy: 0.9195 Epoch 35/250 180/180 [==============================] - 14s 77ms/step - loss: 0.1630 - accuracy: 0.9293 - val_loss: 0.2216 - val_accuracy: 0.9157 Epoch 36/250 180/180 [==============================] - 14s 76ms/step - loss: 0.1509 - accuracy: 0.9376 - val_loss: 0.2167 - val_accuracy: 0.9199 Epoch 37/250 180/180 [==============================] - 14s 75ms/step - loss: 0.1567 - accuracy: 0.9325 - val_loss: 0.2065 - val_accuracy: 0.9206 Epoch 38/250 180/180 [==============================] - 14s 78ms/step - loss: 0.1437 - accuracy: 0.9417 - val_loss: 0.2107 - val_accuracy: 0.9188 Epoch 39/250 180/180 [==============================] - 12s 68ms/step - loss: 0.1422 - accuracy: 0.9408 - val_loss: 0.2045 - val_accuracy: 0.9226 Epoch 40/250 180/180 [==============================] - 12s 67ms/step - loss: 0.1476 - accuracy: 0.9380 - val_loss: 0.2188 - val_accuracy: 0.9174 Epoch 41/250 180/180 [==============================] - 14s 80ms/step - loss: 0.1321 - accuracy: 0.9446 - val_loss: 0.2632 - val_accuracy: 0.9059 Epoch 42/250 
180/180 [==============================] - 14s 80ms/step - loss: 0.1424 - accuracy: 0.9399 - val_loss: 0.2094 - val_accuracy: 0.9226 Epoch 43/250 180/180 [==============================] - 13s 71ms/step - loss: 0.1362 - accuracy: 0.9446 - val_loss: 0.2144 - val_accuracy: 0.9223 Epoch 44/250 180/180 [==============================] - 13s 71ms/step - loss: 0.1125 - accuracy: 0.9565 - val_loss: 0.2079 - val_accuracy: 0.9289 Epoch 45/250 180/180 [==============================] - 13s 73ms/step - loss: 0.1057 - accuracy: 0.9578 - val_loss: 0.2239 - val_accuracy: 0.9216 Epoch 46/250 180/180 [==============================] - 13s 74ms/step - loss: 0.1079 - accuracy: 0.9562 - val_loss: 0.2100 - val_accuracy: 0.9265 Epoch 47/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0961 - accuracy: 0.9619 - val_loss: 0.2225 - val_accuracy: 0.9268 Epoch 48/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0961 - accuracy: 0.9617 - val_loss: 0.2259 - val_accuracy: 0.9265 Epoch 49/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0986 - accuracy: 0.9613 - val_loss: 0.2189 - val_accuracy: 0.9279 Epoch 50/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0865 - accuracy: 0.9672 - val_loss: 0.2381 - val_accuracy: 0.9237 Epoch 51/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0852 - accuracy: 0.9672 - val_loss: 0.2304 - val_accuracy: 0.9254 Epoch 52/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0875 - accuracy: 0.9658 - val_loss: 0.2238 - val_accuracy: 0.9293 Epoch 53/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0824 - accuracy: 0.9671 - val_loss: 0.2365 - val_accuracy: 0.9247 Epoch 54/250 180/180 [==============================] - 13s 69ms/step - loss: 0.0814 - accuracy: 0.9691 - val_loss: 0.2343 - val_accuracy: 0.9317 Epoch 55/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0799 - accuracy: 0.9709 - 
val_loss: 0.2384 - val_accuracy: 0.9300 Epoch 56/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0796 - accuracy: 0.9696 - val_loss: 0.2447 - val_accuracy: 0.9296 Epoch 57/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0800 - accuracy: 0.9697 - val_loss: 0.2565 - val_accuracy: 0.9265 Epoch 58/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0778 - accuracy: 0.9703 - val_loss: 0.2373 - val_accuracy: 0.9296 Epoch 59/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0775 - accuracy: 0.9699 - val_loss: 0.2394 - val_accuracy: 0.9334 Epoch 60/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0780 - accuracy: 0.9705 - val_loss: 0.2406 - val_accuracy: 0.9307 Epoch 61/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0761 - accuracy: 0.9700 - val_loss: 0.2445 - val_accuracy: 0.9303 Epoch 62/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0782 - accuracy: 0.9713 - val_loss: 0.2361 - val_accuracy: 0.9286 Epoch 63/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0728 - accuracy: 0.9731 - val_loss: 0.2473 - val_accuracy: 0.9310 Epoch 64/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0732 - accuracy: 0.9715 - val_loss: 0.2459 - val_accuracy: 0.9310 Epoch 65/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0698 - accuracy: 0.9733 - val_loss: 0.2686 - val_accuracy: 0.9230 Epoch 66/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0718 - accuracy: 0.9734 - val_loss: 0.2490 - val_accuracy: 0.9321 Epoch 67/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0720 - accuracy: 0.9732 - val_loss: 0.2541 - val_accuracy: 0.9317 Epoch 68/250 180/180 [==============================] - 12s 66ms/step - loss: 0.0654 - accuracy: 0.9757 - val_loss: 0.2536 - val_accuracy: 0.9321 Epoch 69/250 180/180 [==============================] - 13s 
73ms/step - loss: 0.0647 - accuracy: 0.9755 - val_loss: 0.2608 - val_accuracy: 0.9265 Epoch 70/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0634 - accuracy: 0.9776 - val_loss: 0.2566 - val_accuracy: 0.9317 Epoch 71/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0657 - accuracy: 0.9760 - val_loss: 0.2609 - val_accuracy: 0.9324 Epoch 72/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0635 - accuracy: 0.9774 - val_loss: 0.2579 - val_accuracy: 0.9324 Epoch 73/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0612 - accuracy: 0.9780 - val_loss: 0.2612 - val_accuracy: 0.9303 Epoch 74/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0604 - accuracy: 0.9792 - val_loss: 0.2620 - val_accuracy: 0.9314 Epoch 75/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0600 - accuracy: 0.9790 - val_loss: 0.2633 - val_accuracy: 0.9317 Epoch 76/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0610 - accuracy: 0.9787 - val_loss: 0.2618 - val_accuracy: 0.9321 Epoch 77/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0607 - accuracy: 0.9787 - val_loss: 0.2628 - val_accuracy: 0.9317 Epoch 78/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0599 - accuracy: 0.9793 - val_loss: 0.2630 - val_accuracy: 0.9314 Epoch 79/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0607 - accuracy: 0.9780 - val_loss: 0.2623 - val_accuracy: 0.9317 Epoch 80/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0613 - accuracy: 0.9784 - val_loss: 0.2637 - val_accuracy: 0.9310 Epoch 81/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0587 - accuracy: 0.9801 - val_loss: 0.2638 - val_accuracy: 0.9324 Epoch 82/250 180/180 [==============================] - 14s 80ms/step - loss: 0.0593 - accuracy: 0.9798 - val_loss: 0.2636 - val_accuracy: 0.9324 Epoch 83/250 
180/180 [==============================] - 13s 71ms/step - loss: 0.0590 - accuracy: 0.9799 - val_loss: 0.2635 - val_accuracy: 0.9321 Epoch 84/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0601 - accuracy: 0.9792 - val_loss: 0.2637 - val_accuracy: 0.9324 Epoch 85/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0586 - accuracy: 0.9787 - val_loss: 0.2636 - val_accuracy: 0.9321 Epoch 86/250 180/180 [==============================] - 15s 81ms/step - loss: 0.0595 - accuracy: 0.9791 - val_loss: 0.2637 - val_accuracy: 0.9324 Epoch 87/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0589 - accuracy: 0.9788 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 88/250 180/180 [==============================] - 16s 87ms/step - loss: 0.0588 - accuracy: 0.9794 - val_loss: 0.2637 - val_accuracy: 0.9328 Epoch 89/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0601 - accuracy: 0.9792 - val_loss: 0.2637 - val_accuracy: 0.9328 Epoch 90/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0598 - accuracy: 0.9774 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 91/250 180/180 [==============================] - 17s 97ms/step - loss: 0.0588 - accuracy: 0.9795 - val_loss: 0.2637 - val_accuracy: 0.9328 Epoch 92/250 180/180 [==============================] - 14s 75ms/step - loss: 0.0592 - accuracy: 0.9790 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 93/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0595 - accuracy: 0.9803 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 94/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0598 - accuracy: 0.9800 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 95/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0582 - accuracy: 0.9801 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 96/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0587 - accuracy: 0.9794 - 
val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 97/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0593 - accuracy: 0.9806 - val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 98/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0593 - accuracy: 0.9794 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 99/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0603 - accuracy: 0.9795 - val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 100/250 180/180 [==============================] - 13s 75ms/step - loss: 0.0586 - accuracy: 0.9800 - val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 101/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0584 - accuracy: 0.9794 - val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 102/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0594 - accuracy: 0.9786 - val_loss: 0.2639 - val_accuracy: 0.9324 Epoch 103/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0581 - accuracy: 0.9794 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 104/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0595 - accuracy: 0.9801 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 105/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0581 - accuracy: 0.9800 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 106/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0578 - accuracy: 0.9800 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 107/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0575 - accuracy: 0.9801 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 108/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0578 - accuracy: 0.9796 - val_loss: 0.2638 - val_accuracy: 0.9328 Epoch 109/250 180/180 [==============================] - 14s 75ms/step - loss: 0.0583 - accuracy: 0.9799 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 110/250 180/180 
[==============================] - 15s 82ms/step - loss: 0.0593 - accuracy: 0.9795 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 111/250 180/180 [==============================] - 17s 93ms/step - loss: 0.0587 - accuracy: 0.9807 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 112/250 180/180 [==============================] - 16s 86ms/step - loss: 0.0583 - accuracy: 0.9802 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 113/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0578 - accuracy: 0.9803 - val_loss: 0.2640 - val_accuracy: 0.9328 Epoch 114/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0591 - accuracy: 0.9791 - val_loss: 0.2639 - val_accuracy: 0.9328 Epoch 115/250 180/180 [==============================] - 16s 90ms/step - loss: 0.0580 - accuracy: 0.9807 - val_loss: 0.2640 - val_accuracy: 0.9328 Epoch 116/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0575 - accuracy: 0.9803 - val_loss: 0.2640 - val_accuracy: 0.9324 Epoch 117/250 180/180 [==============================] - 13s 75ms/step - loss: 0.0579 - accuracy: 0.9800 - val_loss: 0.2640 - val_accuracy: 0.9324 Epoch 118/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0607 - accuracy: 0.9791 - val_loss: 0.2640 - val_accuracy: 0.9324 Epoch 119/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0575 - accuracy: 0.9798 - val_loss: 0.2640 - val_accuracy: 0.9328 Epoch 120/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0569 - accuracy: 0.9806 - val_loss: 0.2640 - val_accuracy: 0.9324 Epoch 121/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0592 - accuracy: 0.9796 - val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 122/250 180/180 [==============================] - 15s 82ms/step - loss: 0.0591 - accuracy: 0.9803 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 123/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0592 - accuracy: 0.9794 - 
val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 124/250 180/180 [==============================] - 14s 78ms/step - loss: 0.0584 - accuracy: 0.9799 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 125/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0588 - accuracy: 0.9794 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 126/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0595 - accuracy: 0.9788 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 127/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0595 - accuracy: 0.9794 - val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 128/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0588 - accuracy: 0.9792 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 129/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0581 - accuracy: 0.9796 - val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 130/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0586 - accuracy: 0.9785 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 131/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0591 - accuracy: 0.9785 - val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 132/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0582 - accuracy: 0.9796 - val_loss: 0.2641 - val_accuracy: 0.9324 Epoch 133/250 180/180 [==============================] - 14s 78ms/step - loss: 0.0577 - accuracy: 0.9797 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 134/250 180/180 [==============================] - 14s 78ms/step - loss: 0.0592 - accuracy: 0.9780 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 135/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0574 - accuracy: 0.9798 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 136/250 180/180 [==============================] - 14s 76ms/step - loss: 0.0587 - accuracy: 0.9788 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 137/250 180/180 
[==============================] - 13s 70ms/step - loss: 0.0583 - accuracy: 0.9797 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 138/250 180/180 [==============================] - 13s 74ms/step - loss: 0.0590 - accuracy: 0.9795 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 139/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0585 - accuracy: 0.9799 - val_loss: 0.2641 - val_accuracy: 0.9328 Epoch 140/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0598 - accuracy: 0.9788 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 141/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0584 - accuracy: 0.9792 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 142/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0594 - accuracy: 0.9789 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 143/250 180/180 [==============================] - 14s 75ms/step - loss: 0.0589 - accuracy: 0.9789 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 144/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0593 - accuracy: 0.9783 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 145/250 180/180 [==============================] - 14s 79ms/step - loss: 0.0605 - accuracy: 0.9779 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 146/250 180/180 [==============================] - 16s 87ms/step - loss: 0.0581 - accuracy: 0.9795 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 147/250 180/180 [==============================] - 15s 81ms/step - loss: 0.0584 - accuracy: 0.9794 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 148/250 180/180 [==============================] - 14s 77ms/step - loss: 0.0585 - accuracy: 0.9787 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 149/250 180/180 [==============================] - 18s 100ms/step - loss: 0.0584 - accuracy: 0.9787 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 150/250 180/180 [==============================] - 13s 75ms/step - loss: 0.0586 - accuracy: 0.9790 - 
val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 151/250 180/180 [==============================] - 15s 83ms/step - loss: 0.0595 - accuracy: 0.9792 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 152/250 180/180 [==============================] - 15s 84ms/step - loss: 0.0587 - accuracy: 0.9794 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 153/250 180/180 [==============================] - 15s 83ms/step - loss: 0.0592 - accuracy: 0.9783 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 154/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0582 - accuracy: 0.9804 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 155/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0599 - accuracy: 0.9800 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 156/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0585 - accuracy: 0.9805 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 157/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0584 - accuracy: 0.9794 - val_loss: 0.2642 - val_accuracy: 0.9324 Epoch 158/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0567 - accuracy: 0.9801 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 159/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0578 - accuracy: 0.9790 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 160/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0579 - accuracy: 0.9797 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 161/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0604 - accuracy: 0.9780 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 162/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0592 - accuracy: 0.9796 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 163/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0597 - accuracy: 0.9789 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 164/250 180/180 
[==============================] - 12s 69ms/step - loss: 0.0604 - accuracy: 0.9785 - val_loss: 0.2642 - val_accuracy: 0.9328 Epoch 165/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0586 - accuracy: 0.9791 - val_loss: 0.2643 - val_accuracy: 0.9328 Epoch 166/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0592 - accuracy: 0.9789 - val_loss: 0.2643 - val_accuracy: 0.9324 Epoch 167/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0569 - accuracy: 0.9805 - val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 168/250 180/180 [==============================] - 14s 75ms/step - loss: 0.0608 - accuracy: 0.9786 - val_loss: 0.2643 - val_accuracy: 0.9324 Epoch 169/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0574 - accuracy: 0.9798 - val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 170/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0580 - accuracy: 0.9791 - val_loss: 0.2643 - val_accuracy: 0.9324 Epoch 171/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0584 - accuracy: 0.9801 - val_loss: 0.2643 - val_accuracy: 0.9324 Epoch 172/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0603 - accuracy: 0.9785 - val_loss: 0.2644 - val_accuracy: 0.9321 Epoch 173/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0591 - accuracy: 0.9792 - val_loss: 0.2645 - val_accuracy: 0.9321 Epoch 174/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0587 - accuracy: 0.9791 - val_loss: 0.2645 - val_accuracy: 0.9321 Epoch 175/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0581 - accuracy: 0.9794 - val_loss: 0.2645 - val_accuracy: 0.9321 Epoch 176/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0594 - accuracy: 0.9787 - val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 177/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0592 - accuracy: 0.9792 - 
val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 178/250 180/180 [==============================] - 16s 89ms/step - loss: 0.0581 - accuracy: 0.9796 - val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 179/250 180/180 [==============================] - 13s 73ms/step - loss: 0.0578 - accuracy: 0.9798 - val_loss: 0.2644 - val_accuracy: 0.9328 Epoch 180/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0575 - accuracy: 0.9796 - val_loss: 0.2644 - val_accuracy: 0.9328 Epoch 181/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0585 - accuracy: 0.9800 - val_loss: 0.2644 - val_accuracy: 0.9324 Epoch 182/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0599 - accuracy: 0.9795 - val_loss: 0.2645 - val_accuracy: 0.9324 Epoch 183/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0595 - accuracy: 0.9788 - val_loss: 0.2645 - val_accuracy: 0.9324 Epoch 184/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0569 - accuracy: 0.9801 - val_loss: 0.2645 - val_accuracy: 0.9324 Epoch 185/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0599 - accuracy: 0.9798 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 186/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0578 - accuracy: 0.9805 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 187/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0578 - accuracy: 0.9801 - val_loss: 0.2646 - val_accuracy: 0.9321 Epoch 188/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0567 - accuracy: 0.9800 - val_loss: 0.2646 - val_accuracy: 0.9321 Epoch 189/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0569 - accuracy: 0.9807 - val_loss: 0.2646 - val_accuracy: 0.9321 Epoch 190/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0585 - accuracy: 0.9783 - val_loss: 0.2646 - val_accuracy: 0.9321 Epoch 191/250 180/180 
[==============================] - 12s 69ms/step - loss: 0.0596 - accuracy: 0.9788 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 192/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0573 - accuracy: 0.9798 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 193/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0584 - accuracy: 0.9799 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 194/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0584 - accuracy: 0.9792 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 195/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0578 - accuracy: 0.9794 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 196/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0576 - accuracy: 0.9791 - val_loss: 0.2647 - val_accuracy: 0.9324 Epoch 197/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0581 - accuracy: 0.9795 - val_loss: 0.2647 - val_accuracy: 0.9324 Epoch 198/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0579 - accuracy: 0.9803 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 199/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0584 - accuracy: 0.9800 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 200/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0601 - accuracy: 0.9789 - val_loss: 0.2646 - val_accuracy: 0.9328 Epoch 201/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0581 - accuracy: 0.9793 - val_loss: 0.2646 - val_accuracy: 0.9324 Epoch 202/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0597 - accuracy: 0.9801 - val_loss: 0.2646 - val_accuracy: 0.9328 Epoch 203/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0591 - accuracy: 0.9801 - val_loss: 0.2647 - val_accuracy: 0.9324 Epoch 204/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0588 - accuracy: 0.9789 - 
val_loss: 0.2647 - val_accuracy: 0.9324 Epoch 205/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0582 - accuracy: 0.9804 - val_loss: 0.2646 - val_accuracy: 0.9328 Epoch 206/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0588 - accuracy: 0.9801 - val_loss: 0.2646 - val_accuracy: 0.9328 Epoch 207/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0584 - accuracy: 0.9789 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 208/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0581 - accuracy: 0.9802 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 209/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0597 - accuracy: 0.9795 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 210/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0589 - accuracy: 0.9787 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 211/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0597 - accuracy: 0.9780 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 212/250 180/180 [==============================] - 13s 72ms/step - loss: 0.0581 - accuracy: 0.9793 - val_loss: 0.2647 - val_accuracy: 0.9321 Epoch 213/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0582 - accuracy: 0.9802 - val_loss: 0.2648 - val_accuracy: 0.9321 Epoch 214/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0591 - accuracy: 0.9801 - val_loss: 0.2647 - val_accuracy: 0.9328 Epoch 215/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0582 - accuracy: 0.9795 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 216/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0584 - accuracy: 0.9794 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 217/250 180/180 [==============================] - 13s 70ms/step - loss: 0.0576 - accuracy: 0.9787 - val_loss: 0.2648 - val_accuracy: 0.9328 Epoch 218/250 180/180 
[==============================] - 12s 69ms/step - loss: 0.0593 - accuracy: 0.9782 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 219/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0586 - accuracy: 0.9804 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 220/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0598 - accuracy: 0.9793 - val_loss: 0.2648 - val_accuracy: 0.9321 Epoch 221/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0594 - accuracy: 0.9792 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 222/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0589 - accuracy: 0.9794 - val_loss: 0.2648 - val_accuracy: 0.9328 Epoch 223/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0605 - accuracy: 0.9783 - val_loss: 0.2648 - val_accuracy: 0.9328 Epoch 224/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0579 - accuracy: 0.9797 - val_loss: 0.2648 - val_accuracy: 0.9321 Epoch 225/250 180/180 [==============================] - 12s 66ms/step - loss: 0.0599 - accuracy: 0.9796 - val_loss: 0.2649 - val_accuracy: 0.9321 Epoch 226/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0583 - accuracy: 0.9795 - val_loss: 0.2648 - val_accuracy: 0.9321 Epoch 227/250 180/180 [==============================] - 12s 66ms/step - loss: 0.0582 - accuracy: 0.9796 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 228/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0598 - accuracy: 0.9795 - val_loss: 0.2648 - val_accuracy: 0.9324 Epoch 229/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0579 - accuracy: 0.9796 - val_loss: 0.2648 - val_accuracy: 0.9321 Epoch 230/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0572 - accuracy: 0.9794 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 231/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0562 - accuracy: 0.9814 - 
val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 232/250 180/180 [==============================] - 12s 69ms/step - loss: 0.0610 - accuracy: 0.9792 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 233/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0602 - accuracy: 0.9781 - val_loss: 0.2648 - val_accuracy: 0.9328 Epoch 234/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0586 - accuracy: 0.9803 - val_loss: 0.2649 - val_accuracy: 0.9328 Epoch 235/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0581 - accuracy: 0.9813 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 236/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0577 - accuracy: 0.9802 - val_loss: 0.2650 - val_accuracy: 0.9324 Epoch 237/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0582 - accuracy: 0.9799 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 238/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0578 - accuracy: 0.9795 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 239/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0585 - accuracy: 0.9799 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 240/250 180/180 [==============================] - 13s 71ms/step - loss: 0.0584 - accuracy: 0.9796 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 241/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0603 - accuracy: 0.9777 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 242/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0583 - accuracy: 0.9782 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 243/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0588 - accuracy: 0.9782 - val_loss: 0.2649 - val_accuracy: 0.9328 Epoch 244/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0580 - accuracy: 0.9802 - val_loss: 0.2649 - val_accuracy: 0.9328 Epoch 245/250 180/180 
[==============================] - 12s 67ms/step - loss: 0.0594 - accuracy: 0.9795 - val_loss: 0.2649 - val_accuracy: 0.9328 Epoch 246/250 180/180 [==============================] - 12s 66ms/step - loss: 0.0568 - accuracy: 0.9802 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 247/250 180/180 [==============================] - 12s 68ms/step - loss: 0.0581 - accuracy: 0.9801 - val_loss: 0.2650 - val_accuracy: 0.9324 Epoch 248/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0587 - accuracy: 0.9796 - val_loss: 0.2650 - val_accuracy: 0.9324 Epoch 249/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0588 - accuracy: 0.9795 - val_loss: 0.2649 - val_accuracy: 0.9324 Epoch 250/250 180/180 [==============================] - 12s 67ms/step - loss: 0.0597 - accuracy: 0.9795 - val_loss: 0.2649 - val_accuracy: 0.9324
# Plot training/validation curves for the gender CNN (model_1_g), then save it.
plt.style.use('fivethirtyeight')

train_acc = history_1_g.history['accuracy']
train_loss = history_1_g.history['loss']
test_acc = history_1_g.history['val_accuracy']
test_loss = history_1_g.history['val_loss']

# Derive the x-axis from the recorded history instead of hard-coding 250 epochs,
# so the plot stays correct if the epoch count is changed or training is cut short.
epochs = range(len(train_loss))

fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)

# Left panel: loss curves.
ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")

# Right panel: accuracy curves.
ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")
plt.show()

# Save Model
model_1_g.save('Trained Models/model_1_g.h5')
# Prepare the emotion-classification split: NumPy arrays shaped for Conv1D input,
# with integer labels expanded to one-hot vectors.
x_train, x_test, y_train, y_test = X_train, X_test, y2_train, y2_test
size = 13  # MFCC feature dimension per sample

# Convert features to arrays and append the trailing channel axis that
# Conv1D expects: (num_samples, size, 1).
x_train = np.array(x_train).reshape(-1, size, 1)
x_test = np.array(x_test).reshape(-1, size, 1)

# One-hot encode the class labels.
y_train = to_categorical(np.array(y_train))
y_test = to_categorical(np.array(y_test))
# Create Model
# 1-D CNN for 7-way emotion classification over 13-dim MFCC vectors.
model_1_e = Sequential()

# Feature extractor: three Conv1D(kernel=5, relu) + MaxPooling1D(pool=5, stride=2)
# stages with 256, 256 and 128 filters respectively.
model_1_e.add(Conv1D(256, kernel_size=5, strides=1, padding='same',
                     activation='relu', input_shape=(x_train.shape[1], 1)))
model_1_e.add(MaxPooling1D(pool_size=5, strides=2, padding='same'))
for n_filters in (256, 128):
    model_1_e.add(Conv1D(n_filters, kernel_size=5, strides=1,
                         padding='same', activation='relu'))
    model_1_e.add(MaxPooling1D(pool_size=5, strides=2, padding='same'))

# Regularize, then one final narrower conv stage.
model_1_e.add(Dropout(0.2))
model_1_e.add(Conv1D(64, kernel_size=5, strides=1, padding='same', activation='relu'))
model_1_e.add(MaxPooling1D(pool_size=5, strides=2, padding='same'))

# Classifier head: flatten -> dense -> dropout -> 7-way softmax.
model_1_e.add(Flatten())
model_1_e.add(Dense(units=32, activation='relu'))
model_1_e.add(Dropout(0.3))
model_1_e.add(Dense(units=7, activation='softmax'))

model_1_e.compile(optimizer='adam', loss='categorical_crossentropy',
                  metrics=['accuracy'])
model_1_e.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv1d_4 (Conv1D) (None, 13, 256) 1536 _________________________________________________________________ max_pooling1d_4 (MaxPooling1 (None, 7, 256) 0 _________________________________________________________________ conv1d_5 (Conv1D) (None, 7, 256) 327936 _________________________________________________________________ max_pooling1d_5 (MaxPooling1 (None, 4, 256) 0 _________________________________________________________________ conv1d_6 (Conv1D) (None, 4, 128) 163968 _________________________________________________________________ max_pooling1d_6 (MaxPooling1 (None, 2, 128) 0 _________________________________________________________________ dropout_2 (Dropout) (None, 2, 128) 0 _________________________________________________________________ conv1d_7 (Conv1D) (None, 2, 64) 41024 _________________________________________________________________ max_pooling1d_7 (MaxPooling1 (None, 1, 64) 0 _________________________________________________________________ flatten_1 (Flatten) (None, 64) 0 _________________________________________________________________ dense_2 (Dense) (None, 32) 2080 _________________________________________________________________ dropout_3 (Dropout) (None, 32) 0 _________________________________________________________________ dense_3 (Dense) (None, 7) 231 ================================================================= Total params: 536,775 Trainable params: 536,775 Non-trainable params: 0 _________________________________________________________________
# Training
# Shrink the learning rate by 0.6 (factor=0.4) whenever training loss
# fails to improve for 2 consecutive epochs, down to a floor of 1e-7.
rlrp = ReduceLROnPlateau(monitor='loss',
                         factor=0.4,
                         patience=2,
                         verbose=0,
                         min_lr=1e-7)
history_1_e = model_1_e.fit(x_train,
                            y_train,
                            batch_size=64,
                            epochs=250,
                            validation_data=(x_test, y_test),
                            callbacks=[rlrp])
Epoch 1/250 180/180 [==============================] - 12s 67ms/step - loss: 1.8587 - accuracy: 0.2220 - val_loss: 1.7862 - val_accuracy: 0.3338 Epoch 2/250 180/180 [==============================] - 12s 67ms/step - loss: 1.6455 - accuracy: 0.3540 - val_loss: 1.5594 - val_accuracy: 0.3937 Epoch 3/250 180/180 [==============================] - 12s 67ms/step - loss: 1.5372 - accuracy: 0.4015 - val_loss: 1.4721 - val_accuracy: 0.4296 Epoch 4/250 180/180 [==============================] - 12s 66ms/step - loss: 1.4713 - accuracy: 0.4334 - val_loss: 1.4547 - val_accuracy: 0.4345 Epoch 5/250 180/180 [==============================] - 12s 66ms/step - loss: 1.4413 - accuracy: 0.4430 - val_loss: 1.4069 - val_accuracy: 0.4551 Epoch 6/250 180/180 [==============================] - 12s 67ms/step - loss: 1.4074 - accuracy: 0.4571 - val_loss: 1.3547 - val_accuracy: 0.4700 Epoch 7/250 180/180 [==============================] - 12s 66ms/step - loss: 1.3654 - accuracy: 0.4745 - val_loss: 1.3528 - val_accuracy: 0.4672 Epoch 8/250 180/180 [==============================] - 12s 66ms/step - loss: 1.3418 - accuracy: 0.4834 - val_loss: 1.2980 - val_accuracy: 0.4948 Epoch 9/250 180/180 [==============================] - 12s 66ms/step - loss: 1.3159 - accuracy: 0.4918 - val_loss: 1.2990 - val_accuracy: 0.4923 Epoch 10/250 180/180 [==============================] - 12s 67ms/step - loss: 1.3122 - accuracy: 0.4961 - val_loss: 1.3151 - val_accuracy: 0.4801 Epoch 11/250 180/180 [==============================] - 12s 68ms/step - loss: 1.2853 - accuracy: 0.5019 - val_loss: 1.2615 - val_accuracy: 0.5118 Epoch 12/250 180/180 [==============================] - 12s 67ms/step - loss: 1.2796 - accuracy: 0.5116 - val_loss: 1.2525 - val_accuracy: 0.5045 Epoch 13/250 180/180 [==============================] - 12s 68ms/step - loss: 1.2621 - accuracy: 0.5166 - val_loss: 1.2666 - val_accuracy: 0.5049 Epoch 14/250 180/180 [==============================] - 12s 68ms/step - loss: 1.2402 - accuracy: 0.5268 - 
val_loss: 1.2384 - val_accuracy: 0.5105 Epoch 15/250 180/180 [==============================] - 13s 70ms/step - loss: 1.2282 - accuracy: 0.5295 - val_loss: 1.2706 - val_accuracy: 0.5017 Epoch 16/250 180/180 [==============================] - 12s 66ms/step - loss: 1.2196 - accuracy: 0.5321 - val_loss: 1.2295 - val_accuracy: 0.5247 Epoch 17/250 180/180 [==============================] - 12s 66ms/step - loss: 1.1989 - accuracy: 0.5420 - val_loss: 1.2131 - val_accuracy: 0.5237 Epoch 18/250 180/180 [==============================] - 12s 66ms/step - loss: 1.1852 - accuracy: 0.5429 - val_loss: 1.1986 - val_accuracy: 0.5324 Epoch 19/250 180/180 [==============================] - 12s 67ms/step - loss: 1.1837 - accuracy: 0.5459 - val_loss: 1.2160 - val_accuracy: 0.5171 Epoch 20/250 180/180 [==============================] - 12s 67ms/step - loss: 1.1730 - accuracy: 0.5490 - val_loss: 1.1893 - val_accuracy: 0.5310 Epoch 21/250 180/180 [==============================] - 12s 68ms/step - loss: 1.1754 - accuracy: 0.5456 - val_loss: 1.2003 - val_accuracy: 0.5331 Epoch 22/250 180/180 [==============================] - 12s 67ms/step - loss: 1.1504 - accuracy: 0.5581 - val_loss: 1.1770 - val_accuracy: 0.5369 Epoch 23/250 180/180 [==============================] - 12s 66ms/step - loss: 1.1521 - accuracy: 0.5530 - val_loss: 1.1929 - val_accuracy: 0.5331 Epoch 24/250 180/180 [==============================] - 12s 67ms/step - loss: 1.1433 - accuracy: 0.5586 - val_loss: 1.1442 - val_accuracy: 0.5498 Epoch 25/250 180/180 [==============================] - 12s 67ms/step - loss: 1.1294 - accuracy: 0.5652 - val_loss: 1.1538 - val_accuracy: 0.5467 Epoch 26/250 180/180 [==============================] - 12s 69ms/step - loss: 1.1196 - accuracy: 0.5684 - val_loss: 1.1523 - val_accuracy: 0.5606 Epoch 27/250 180/180 [==============================] - 12s 68ms/step - loss: 1.1079 - accuracy: 0.5700 - val_loss: 1.1366 - val_accuracy: 0.5537 Epoch 28/250 180/180 [==============================] - 12s 
66ms/step - loss: 1.1025 - accuracy: 0.5771 - val_loss: 1.1470 - val_accuracy: 0.5505 Epoch 29/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0986 - accuracy: 0.5770 - val_loss: 1.1402 - val_accuracy: 0.5530 Epoch 30/250 180/180 [==============================] - 12s 66ms/step - loss: 1.0893 - accuracy: 0.5835 - val_loss: 1.1163 - val_accuracy: 0.5620 Epoch 31/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0782 - accuracy: 0.5867 - val_loss: 1.1337 - val_accuracy: 0.5599 Epoch 32/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0748 - accuracy: 0.5859 - val_loss: 1.1263 - val_accuracy: 0.5690 Epoch 33/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0675 - accuracy: 0.5855 - val_loss: 1.1409 - val_accuracy: 0.5443 Epoch 34/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0759 - accuracy: 0.5860 - val_loss: 1.1883 - val_accuracy: 0.5352 Epoch 35/250 180/180 [==============================] - 12s 66ms/step - loss: 1.0609 - accuracy: 0.5949 - val_loss: 1.1204 - val_accuracy: 0.5638 Epoch 36/250 180/180 [==============================] - 12s 66ms/step - loss: 1.0618 - accuracy: 0.5944 - val_loss: 1.1355 - val_accuracy: 0.5648 Epoch 37/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0410 - accuracy: 0.6003 - val_loss: 1.1206 - val_accuracy: 0.5728 Epoch 38/250 180/180 [==============================] - 12s 66ms/step - loss: 1.0241 - accuracy: 0.6070 - val_loss: 1.1118 - val_accuracy: 0.5669 Epoch 39/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0318 - accuracy: 0.6011 - val_loss: 1.1562 - val_accuracy: 0.5530 Epoch 40/250 180/180 [==============================] - 12s 66ms/step - loss: 1.0217 - accuracy: 0.6035 - val_loss: 1.1136 - val_accuracy: 0.5746 Epoch 41/250 180/180 [==============================] - 12s 68ms/step - loss: 1.0126 - accuracy: 0.6118 - val_loss: 1.1146 - val_accuracy: 0.5638 Epoch 42/250 
180/180 [==============================] - 12s 67ms/step - loss: 0.9983 - accuracy: 0.6157 - val_loss: 1.1278 - val_accuracy: 0.5683 Epoch 43/250 180/180 [==============================] - 12s 67ms/step - loss: 1.0101 - accuracy: 0.6130 - val_loss: 1.1381 - val_accuracy: 0.5697 Epoch 44/250 180/180 [==============================] - 12s 66ms/step - loss: 0.9910 - accuracy: 0.6189 - val_loss: 1.1612 - val_accuracy: 0.5634 Epoch 45/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9906 - accuracy: 0.6218 - val_loss: 1.1201 - val_accuracy: 0.5725 Epoch 46/250 180/180 [==============================] - 12s 66ms/step - loss: 0.9878 - accuracy: 0.6253 - val_loss: 1.1181 - val_accuracy: 0.5777 Epoch 47/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9857 - accuracy: 0.6225 - val_loss: 1.1047 - val_accuracy: 0.5728 Epoch 48/250 180/180 [==============================] - 12s 66ms/step - loss: 0.9728 - accuracy: 0.6259 - val_loss: 1.0998 - val_accuracy: 0.5815 Epoch 49/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9660 - accuracy: 0.6278 - val_loss: 1.1174 - val_accuracy: 0.5693 Epoch 50/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9665 - accuracy: 0.6280 - val_loss: 1.1231 - val_accuracy: 0.5868 Epoch 51/250 180/180 [==============================] - 12s 68ms/step - loss: 0.9523 - accuracy: 0.6410 - val_loss: 1.1171 - val_accuracy: 0.5728 Epoch 52/250 180/180 [==============================] - 12s 66ms/step - loss: 0.9492 - accuracy: 0.6355 - val_loss: 1.1638 - val_accuracy: 0.5833 Epoch 53/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9485 - accuracy: 0.6374 - val_loss: 1.0954 - val_accuracy: 0.5812 Epoch 54/250 180/180 [==============================] - 12s 66ms/step - loss: 0.9341 - accuracy: 0.6399 - val_loss: 1.1535 - val_accuracy: 0.5620 Epoch 55/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9466 - accuracy: 0.6368 - 
val_loss: 1.1641 - val_accuracy: 0.5700 Epoch 56/250 180/180 [==============================] - 12s 67ms/step - loss: 0.9468 - accuracy: 0.6370 - val_loss: 1.1026 - val_accuracy: 0.5805 Epoch 57/250 180/180 [==============================] - 12s 69ms/step - loss: 0.8739 - accuracy: 0.6650 - val_loss: 1.1159 - val_accuracy: 0.5889 Epoch 58/250 180/180 [==============================] - 12s 65ms/step - loss: 0.8558 - accuracy: 0.6722 - val_loss: 1.0954 - val_accuracy: 0.5916 Epoch 59/250 180/180 [==============================] - 12s 66ms/step - loss: 0.8394 - accuracy: 0.6765 - val_loss: 1.1260 - val_accuracy: 0.5902 Epoch 60/250 180/180 [==============================] - 12s 66ms/step - loss: 0.8348 - accuracy: 0.6803 - val_loss: 1.1375 - val_accuracy: 0.5850 Epoch 61/250 180/180 [==============================] - 12s 67ms/step - loss: 0.8297 - accuracy: 0.6836 - val_loss: 1.1337 - val_accuracy: 0.5899 Epoch 62/250 180/180 [==============================] - 12s 67ms/step - loss: 0.8202 - accuracy: 0.6891 - val_loss: 1.1420 - val_accuracy: 0.5843 Epoch 63/250 180/180 [==============================] - 12s 65ms/step - loss: 0.8172 - accuracy: 0.6886 - val_loss: 1.1526 - val_accuracy: 0.5878 Epoch 64/250 180/180 [==============================] - 12s 65ms/step - loss: 0.8190 - accuracy: 0.6869 - val_loss: 1.1709 - val_accuracy: 0.5836 Epoch 65/250 180/180 [==============================] - 13s 70ms/step - loss: 0.8168 - accuracy: 0.6883 - val_loss: 1.1566 - val_accuracy: 0.5857 Epoch 66/250 180/180 [==============================] - 12s 66ms/step - loss: 0.8051 - accuracy: 0.6881 - val_loss: 1.1916 - val_accuracy: 0.5770 Epoch 67/250 180/180 [==============================] - 12s 66ms/step - loss: 0.8098 - accuracy: 0.6885 - val_loss: 1.1554 - val_accuracy: 0.5833 Epoch 68/250 180/180 [==============================] - 12s 65ms/step - loss: 0.7952 - accuracy: 0.6939 - val_loss: 1.1419 - val_accuracy: 0.5902 Epoch 69/250 180/180 [==============================] - 12s 
66ms/step - loss: 0.7962 - accuracy: 0.6963 - val_loss: 1.1676 - val_accuracy: 0.5850 Epoch 70/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7818 - accuracy: 0.7043 - val_loss: 1.1665 - val_accuracy: 0.5955 Epoch 71/250 180/180 [==============================] - 12s 67ms/step - loss: 0.7919 - accuracy: 0.6970 - val_loss: 1.1495 - val_accuracy: 0.5861 Epoch 72/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7827 - accuracy: 0.7010 - val_loss: 1.1686 - val_accuracy: 0.5909 Epoch 73/250 180/180 [==============================] - 12s 65ms/step - loss: 0.7501 - accuracy: 0.7139 - val_loss: 1.1844 - val_accuracy: 0.5965 Epoch 74/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7416 - accuracy: 0.7153 - val_loss: 1.1861 - val_accuracy: 0.5955 Epoch 75/250 180/180 [==============================] - 12s 67ms/step - loss: 0.7368 - accuracy: 0.7201 - val_loss: 1.1898 - val_accuracy: 0.5955 Epoch 76/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7312 - accuracy: 0.7219 - val_loss: 1.1831 - val_accuracy: 0.6017 Epoch 77/250 180/180 [==============================] - 12s 67ms/step - loss: 0.7371 - accuracy: 0.7164 - val_loss: 1.2031 - val_accuracy: 0.5965 Epoch 78/250 180/180 [==============================] - 12s 65ms/step - loss: 0.7261 - accuracy: 0.7256 - val_loss: 1.2059 - val_accuracy: 0.5941 Epoch 79/250 180/180 [==============================] - 12s 65ms/step - loss: 0.7338 - accuracy: 0.7213 - val_loss: 1.2058 - val_accuracy: 0.5983 Epoch 80/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7202 - accuracy: 0.7261 - val_loss: 1.2024 - val_accuracy: 0.5916 Epoch 81/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7179 - accuracy: 0.7294 - val_loss: 1.2107 - val_accuracy: 0.5920 Epoch 82/250 180/180 [==============================] - 12s 67ms/step - loss: 0.7260 - accuracy: 0.7241 - val_loss: 1.1973 - val_accuracy: 0.5979 Epoch 83/250 
180/180 [==============================] - 12s 66ms/step - loss: 0.7206 - accuracy: 0.7267 - val_loss: 1.2170 - val_accuracy: 0.5930 Epoch 84/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7037 - accuracy: 0.7372 - val_loss: 1.2186 - val_accuracy: 0.5934 Epoch 85/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7032 - accuracy: 0.7317 - val_loss: 1.2208 - val_accuracy: 0.5909 Epoch 86/250 180/180 [==============================] - 12s 66ms/step - loss: 0.7015 - accuracy: 0.7344 - val_loss: 1.2162 - val_accuracy: 0.5972 Epoch 87/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6938 - accuracy: 0.7410 - val_loss: 1.2246 - val_accuracy: 0.5976 Epoch 88/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6941 - accuracy: 0.7414 - val_loss: 1.2299 - val_accuracy: 0.5944 Epoch 89/250 180/180 [==============================] - 11s 59ms/step - loss: 0.7022 - accuracy: 0.7328 - val_loss: 1.2245 - val_accuracy: 0.5958 Epoch 90/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6852 - accuracy: 0.7416 - val_loss: 1.2217 - val_accuracy: 0.5972 Epoch 91/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6915 - accuracy: 0.7418 - val_loss: 1.2267 - val_accuracy: 0.5976 Epoch 92/250 180/180 [==============================] - 11s 61ms/step - loss: 0.6855 - accuracy: 0.7389 - val_loss: 1.2308 - val_accuracy: 0.5948 Epoch 93/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6839 - accuracy: 0.7386 - val_loss: 1.2335 - val_accuracy: 0.5969 Epoch 94/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6809 - accuracy: 0.7421 - val_loss: 1.2333 - val_accuracy: 0.5976 Epoch 95/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6880 - accuracy: 0.7388 - val_loss: 1.2337 - val_accuracy: 0.5972 Epoch 96/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6809 - accuracy: 0.7424 - 
val_loss: 1.2293 - val_accuracy: 0.5972 Epoch 97/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6848 - accuracy: 0.7446 - val_loss: 1.2331 - val_accuracy: 0.5972 Epoch 98/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6808 - accuracy: 0.7451 - val_loss: 1.2339 - val_accuracy: 0.5979 Epoch 99/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6834 - accuracy: 0.7443 - val_loss: 1.2335 - val_accuracy: 0.5983 Epoch 100/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6802 - accuracy: 0.7441 - val_loss: 1.2340 - val_accuracy: 0.5979 Epoch 101/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6824 - accuracy: 0.7453 - val_loss: 1.2343 - val_accuracy: 0.5986 Epoch 102/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6802 - accuracy: 0.7451 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 103/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6805 - accuracy: 0.7422 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 104/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6821 - accuracy: 0.7443 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 105/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6761 - accuracy: 0.7442 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 106/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6841 - accuracy: 0.7418 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 107/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6797 - accuracy: 0.7436 - val_loss: 1.2345 - val_accuracy: 0.5976 Epoch 108/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6869 - accuracy: 0.7409 - val_loss: 1.2345 - val_accuracy: 0.5979 Epoch 109/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6836 - accuracy: 0.7444 - val_loss: 1.2345 - val_accuracy: 0.5979 Epoch 110/250 180/180 
[==============================] - 10s 58ms/step - loss: 0.6787 - accuracy: 0.7420 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 111/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6772 - accuracy: 0.7415 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 112/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6816 - accuracy: 0.7439 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 113/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6820 - accuracy: 0.7425 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 114/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6867 - accuracy: 0.7416 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 115/250 180/180 [==============================] - 11s 61ms/step - loss: 0.6815 - accuracy: 0.7424 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 116/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6811 - accuracy: 0.7454 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 117/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6833 - accuracy: 0.7419 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 118/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6817 - accuracy: 0.7443 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 119/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6829 - accuracy: 0.7434 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 120/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6828 - accuracy: 0.7449 - val_loss: 1.2345 - val_accuracy: 0.5976 Epoch 121/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6816 - accuracy: 0.7436 - val_loss: 1.2345 - val_accuracy: 0.5976 Epoch 122/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6842 - accuracy: 0.7388 - val_loss: 1.2345 - val_accuracy: 0.5972 Epoch 123/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6851 - accuracy: 0.7429 - 
val_loss: 1.2345 - val_accuracy: 0.5972 Epoch 124/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6833 - accuracy: 0.7436 - val_loss: 1.2345 - val_accuracy: 0.5972 Epoch 125/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6804 - accuracy: 0.7394 - val_loss: 1.2345 - val_accuracy: 0.5972 Epoch 126/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6830 - accuracy: 0.7442 - val_loss: 1.2345 - val_accuracy: 0.5969 Epoch 127/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6830 - accuracy: 0.7456 - val_loss: 1.2345 - val_accuracy: 0.5969 Epoch 128/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6832 - accuracy: 0.7432 - val_loss: 1.2345 - val_accuracy: 0.5969 Epoch 129/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6807 - accuracy: 0.7455 - val_loss: 1.2346 - val_accuracy: 0.5969 Epoch 130/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6844 - accuracy: 0.7436 - val_loss: 1.2346 - val_accuracy: 0.5972 Epoch 131/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6831 - accuracy: 0.7409 - val_loss: 1.2346 - val_accuracy: 0.5972 Epoch 132/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6818 - accuracy: 0.7418 - val_loss: 1.2346 - val_accuracy: 0.5972 Epoch 133/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6791 - accuracy: 0.7435 - val_loss: 1.2346 - val_accuracy: 0.5969 Epoch 134/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6782 - accuracy: 0.7443 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 135/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6811 - accuracy: 0.7426 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 136/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6787 - accuracy: 0.7436 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 137/250 180/180 
[==============================] - 10s 58ms/step - loss: 0.6846 - accuracy: 0.7439 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 138/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6843 - accuracy: 0.7429 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 139/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6809 - accuracy: 0.7422 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 140/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6857 - accuracy: 0.7431 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 141/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6794 - accuracy: 0.7447 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 142/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6856 - accuracy: 0.7436 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 143/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6792 - accuracy: 0.7438 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 144/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6802 - accuracy: 0.7408 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 145/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6779 - accuracy: 0.7437 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 146/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6835 - accuracy: 0.7408 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 147/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6790 - accuracy: 0.7449 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 148/250 180/180 [==============================] - 11s 63ms/step - loss: 0.6799 - accuracy: 0.7432 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 149/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6751 - accuracy: 0.7449 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 150/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6869 - accuracy: 0.7427 - 
val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 151/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6780 - accuracy: 0.7450 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 152/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6798 - accuracy: 0.7441 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 153/250 180/180 [==============================] - 11s 61ms/step - loss: 0.6824 - accuracy: 0.7410 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 154/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6791 - accuracy: 0.7481 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 155/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6870 - accuracy: 0.7396 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 156/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6844 - accuracy: 0.7449 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 157/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6889 - accuracy: 0.7440 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 158/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6804 - accuracy: 0.7429 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 159/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6809 - accuracy: 0.7426 - val_loss: 1.2346 - val_accuracy: 0.5972 Epoch 160/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6809 - accuracy: 0.7428 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 161/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6794 - accuracy: 0.7469 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 162/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6817 - accuracy: 0.7420 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 163/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6848 - accuracy: 0.7405 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 164/250 180/180 
[==============================] - 11s 60ms/step - loss: 0.6864 - accuracy: 0.7375 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 165/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6871 - accuracy: 0.7415 - val_loss: 1.2346 - val_accuracy: 0.5976 Epoch 166/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6810 - accuracy: 0.7443 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 167/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6801 - accuracy: 0.7428 - val_loss: 1.2346 - val_accuracy: 0.5979 Epoch 168/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6857 - accuracy: 0.7465 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 169/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6830 - accuracy: 0.7436 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 170/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6826 - accuracy: 0.7409 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 171/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6798 - accuracy: 0.7443 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 172/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6830 - accuracy: 0.7409 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 173/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6799 - accuracy: 0.7452 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 174/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6805 - accuracy: 0.7413 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 175/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6784 - accuracy: 0.7452 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 176/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6835 - accuracy: 0.7421 - val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 177/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6833 - accuracy: 0.7404 - 
val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 178/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6793 - accuracy: 0.7453 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 179/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6792 - accuracy: 0.7454 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 180/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6796 - accuracy: 0.7447 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 181/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6807 - accuracy: 0.7421 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 182/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6838 - accuracy: 0.7404 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 183/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6785 - accuracy: 0.7429 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 184/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6838 - accuracy: 0.7389 - val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 185/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6833 - accuracy: 0.7414 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 186/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6896 - accuracy: 0.7350 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 187/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6798 - accuracy: 0.7446 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 188/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6805 - accuracy: 0.7432 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 189/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6830 - accuracy: 0.7440 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 190/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6844 - accuracy: 0.7424 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 191/250 180/180 
[==============================] - 10s 58ms/step - loss: 0.6804 - accuracy: 0.7435 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 192/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6794 - accuracy: 0.7443 - val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 193/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6842 - accuracy: 0.7448 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 194/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6837 - accuracy: 0.7445 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 195/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6828 - accuracy: 0.7459 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 196/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6858 - accuracy: 0.7395 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 197/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6791 - accuracy: 0.7446 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 198/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6783 - accuracy: 0.7467 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 199/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6800 - accuracy: 0.7409 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 200/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6813 - accuracy: 0.7444 - val_loss: 1.2350 - val_accuracy: 0.5976 Epoch 201/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6829 - accuracy: 0.7442 - val_loss: 1.2350 - val_accuracy: 0.5976 Epoch 202/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6809 - accuracy: 0.7408 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 203/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6839 - accuracy: 0.7415 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 204/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6800 - accuracy: 0.7411 - 
val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 205/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6832 - accuracy: 0.7417 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 206/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6856 - accuracy: 0.7440 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 207/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6897 - accuracy: 0.7363 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 208/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6852 - accuracy: 0.7433 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 209/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6764 - accuracy: 0.7453 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 210/250 180/180 [==============================] - 11s 60ms/step - loss: 0.6822 - accuracy: 0.7394 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 211/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6772 - accuracy: 0.7439 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 212/250 180/180 [==============================] - 10s 58ms/step - loss: 0.6815 - accuracy: 0.7436 - val_loss: 1.2347 - val_accuracy: 0.5979 Epoch 213/250 180/180 [==============================] - 11s 59ms/step - loss: 0.6806 - accuracy: 0.7449 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 214/250 180/180 [==============================] - 11s 58ms/step - loss: 0.6823 - accuracy: 0.7395 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 215/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6763 - accuracy: 0.7465 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 216/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6832 - accuracy: 0.7426 - val_loss: 1.2347 - val_accuracy: 0.5976 Epoch 217/250 180/180 [==============================] - 12s 69ms/step - loss: 0.6828 - accuracy: 0.7416 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 218/250 180/180 
[==============================] - 12s 67ms/step - loss: 0.6806 - accuracy: 0.7426 - val_loss: 1.2347 - val_accuracy: 0.5972 Epoch 219/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6795 - accuracy: 0.7417 - val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 220/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6840 - accuracy: 0.7427 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 221/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6781 - accuracy: 0.7456 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 222/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6756 - accuracy: 0.7457 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 223/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6842 - accuracy: 0.7410 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 224/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6825 - accuracy: 0.7442 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 225/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6807 - accuracy: 0.7410 - val_loss: 1.2348 - val_accuracy: 0.5976 Epoch 226/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6811 - accuracy: 0.7400 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 227/250 180/180 [==============================] - 12s 68ms/step - loss: 0.6812 - accuracy: 0.7436 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 228/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6756 - accuracy: 0.7446 - val_loss: 1.2349 - val_accuracy: 0.5983 Epoch 229/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6817 - accuracy: 0.7420 - val_loss: 1.2348 - val_accuracy: 0.5979 Epoch 230/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6796 - accuracy: 0.7436 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 231/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6782 - accuracy: 0.7424 - 
val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 232/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6874 - accuracy: 0.7431 - val_loss: 1.2349 - val_accuracy: 0.5983 Epoch 233/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6790 - accuracy: 0.7466 - val_loss: 1.2349 - val_accuracy: 0.5983 Epoch 234/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6835 - accuracy: 0.7393 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 235/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6816 - accuracy: 0.7416 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 236/250 180/180 [==============================] - 12s 69ms/step - loss: 0.6839 - accuracy: 0.7412 - val_loss: 1.2349 - val_accuracy: 0.5979 Epoch 237/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6809 - accuracy: 0.7443 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 238/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6803 - accuracy: 0.7450 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 239/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6779 - accuracy: 0.7447 - val_loss: 1.2349 - val_accuracy: 0.5972 Epoch 240/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6863 - accuracy: 0.7412 - val_loss: 1.2348 - val_accuracy: 0.5972 Epoch 241/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6776 - accuracy: 0.7438 - val_loss: 1.2349 - val_accuracy: 0.5976 Epoch 242/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6888 - accuracy: 0.7400 - val_loss: 1.2349 - val_accuracy: 0.5972 Epoch 243/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6829 - accuracy: 0.7436 - val_loss: 1.2349 - val_accuracy: 0.5972 Epoch 244/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6800 - accuracy: 0.7410 - val_loss: 1.2350 - val_accuracy: 0.5976 Epoch 245/250 180/180 
[==============================] - 12s 66ms/step - loss: 0.6773 - accuracy: 0.7441 - val_loss: 1.2350 - val_accuracy: 0.5976 Epoch 246/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6795 - accuracy: 0.7429 - val_loss: 1.2350 - val_accuracy: 0.5976 Epoch 247/250 180/180 [==============================] - 12s 67ms/step - loss: 0.6791 - accuracy: 0.7451 - val_loss: 1.2350 - val_accuracy: 0.5972 Epoch 248/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6807 - accuracy: 0.7398 - val_loss: 1.2349 - val_accuracy: 0.5972 Epoch 249/250 180/180 [==============================] - 12s 65ms/step - loss: 0.6814 - accuracy: 0.7445 - val_loss: 1.2349 - val_accuracy: 0.5972 Epoch 250/250 180/180 [==============================] - 12s 66ms/step - loss: 0.6803 - accuracy: 0.7448 - val_loss: 1.2349 - val_accuracy: 0.5976
# Plot the CNN training history: loss and accuracy curves for both the
# training and validation ("testing") splits, side by side.
plt.style.use('fivethirtyeight')

train_acc = history_1_e.history['accuracy']
train_loss = history_1_e.history['loss']
test_acc = history_1_e.history['val_accuracy']
test_loss = history_1_e.history['val_loss']

# Derive the x-axis from the recorded history instead of hard-coding 250,
# so the plot stays correct if training stops early or the epoch count
# changes on a re-run.
epochs = list(range(len(train_loss)))

fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)

# Left panel: loss curves.
ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")

# Right panel: accuracy curves.
ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")

plt.show()

# Save Model (Keras HDF5 format; assumes the 'Trained Models' directory
# already exists — TODO confirm it is created earlier in the notebook)
model_1_e.save('Trained Models/model_1_e.h5')
# Train Machine Learning Model: a scikit-learn MLP baseline fit on the
# first label set (y1_*; presumably the gender task — verify upstream).
# Fixed random_state keeps runs reproducible; verbose=1 prints per-iteration
# loss (captured in the log below).
model_2_g = MLPClassifier(random_state=1, max_iter=1000, verbose=1)

# Fit Model on the shared feature matrix.
model_2_g.fit(X_train, y1_train)

# Get Predictions for the held-out test split.
predictions_g = model_2_g.predict(X_test)
Iteration 1, loss = 0.68545060 Iteration 2, loss = 0.66403813 Iteration 3, loss = 0.64105889 Iteration 4, loss = 0.61540173 Iteration 5, loss = 0.58821098 Iteration 6, loss = 0.56570886 Iteration 7, loss = 0.54779540 Iteration 8, loss = 0.53371671 Iteration 9, loss = 0.52473230 Iteration 10, loss = 0.51317951 Iteration 11, loss = 0.50614389 Iteration 12, loss = 0.49864088 Iteration 13, loss = 0.49164075 Iteration 14, loss = 0.48437159 Iteration 15, loss = 0.47808643 Iteration 16, loss = 0.47163235 Iteration 17, loss = 0.46648278 Iteration 18, loss = 0.46041894 Iteration 19, loss = 0.45719581 Iteration 20, loss = 0.45156460 Iteration 21, loss = 0.44956813 Iteration 22, loss = 0.44376280 Iteration 23, loss = 0.44021554 Iteration 24, loss = 0.43719173 Iteration 25, loss = 0.43271986 Iteration 26, loss = 0.42988026 Iteration 27, loss = 0.42889944 Iteration 28, loss = 0.42409330 Iteration 29, loss = 0.42224804 Iteration 30, loss = 0.41987162 Iteration 31, loss = 0.41745159 Iteration 32, loss = 0.41531324 Iteration 33, loss = 0.41352215 Iteration 34, loss = 0.41180485 Iteration 35, loss = 0.40942911 Iteration 36, loss = 0.40789101 Iteration 37, loss = 0.40691692 Iteration 38, loss = 0.40416612 Iteration 39, loss = 0.40425108 Iteration 40, loss = 0.40095546 Iteration 41, loss = 0.40183769 Iteration 42, loss = 0.40038414 Iteration 43, loss = 0.39751938 Iteration 44, loss = 0.39564239 Iteration 45, loss = 0.39427998 Iteration 46, loss = 0.39361220 Iteration 47, loss = 0.39132423 Iteration 48, loss = 0.39159446 Iteration 49, loss = 0.38964175 Iteration 50, loss = 0.38754630 Iteration 51, loss = 0.38605583 Iteration 52, loss = 0.38503564 Iteration 53, loss = 0.38385837 Iteration 54, loss = 0.38223083 Iteration 55, loss = 0.38139702 Iteration 56, loss = 0.38024500 Iteration 57, loss = 0.37966565 Iteration 58, loss = 0.37708043 Iteration 59, loss = 0.37556536 Iteration 60, loss = 0.37733444 Iteration 61, loss = 0.37456084 Iteration 62, loss = 0.37254474 Iteration 63, loss = 
0.37251673 Iteration 64, loss = 0.36985972 Iteration 65, loss = 0.37094952 Iteration 66, loss = 0.36734136 Iteration 67, loss = 0.36638162 Iteration 68, loss = 0.36606176 Iteration 69, loss = 0.36514936 Iteration 70, loss = 0.36370135 Iteration 71, loss = 0.36287168 Iteration 72, loss = 0.36063300 Iteration 73, loss = 0.36082901 Iteration 74, loss = 0.35911032 Iteration 75, loss = 0.35939621 Iteration 76, loss = 0.35728600 Iteration 77, loss = 0.35620197 Iteration 78, loss = 0.35556744 Iteration 79, loss = 0.35653196 Iteration 80, loss = 0.35540086 Iteration 81, loss = 0.35363788 Iteration 82, loss = 0.35121546 Iteration 83, loss = 0.35238921 Iteration 84, loss = 0.35083660 Iteration 85, loss = 0.34942843 Iteration 86, loss = 0.34977030 Iteration 87, loss = 0.34703039 Iteration 88, loss = 0.34892762 Iteration 89, loss = 0.34682662 Iteration 90, loss = 0.34507205 Iteration 91, loss = 0.34391715 Iteration 92, loss = 0.34351906 Iteration 93, loss = 0.34413493 Iteration 94, loss = 0.34222996 Iteration 95, loss = 0.34197234 Iteration 96, loss = 0.34090119 Iteration 97, loss = 0.34149067 Iteration 98, loss = 0.34013978 Iteration 99, loss = 0.33893077 Iteration 100, loss = 0.33779030 Iteration 101, loss = 0.33693899 Iteration 102, loss = 0.33674940 Iteration 103, loss = 0.33564251 Iteration 104, loss = 0.33421616 Iteration 105, loss = 0.33529783 Iteration 106, loss = 0.33484526 Iteration 107, loss = 0.33395061 Iteration 108, loss = 0.33377571 Iteration 109, loss = 0.33280486 Iteration 110, loss = 0.33113806 Iteration 111, loss = 0.33183981 Iteration 112, loss = 0.32963974 Iteration 113, loss = 0.32921879 Iteration 114, loss = 0.32926910 Iteration 115, loss = 0.33053693 Iteration 116, loss = 0.32803684 Iteration 117, loss = 0.32632999 Iteration 118, loss = 0.33046806 Iteration 119, loss = 0.32687029 Iteration 120, loss = 0.32462545 Iteration 121, loss = 0.32728827 Iteration 122, loss = 0.32495347 Iteration 123, loss = 0.32386194 Iteration 124, loss = 0.32717545 Iteration 
125, loss = 0.32312304 Iteration 126, loss = 0.32282436 Iteration 127, loss = 0.32207485 Iteration 128, loss = 0.32203446 Iteration 129, loss = 0.32139738 Iteration 130, loss = 0.32065401 Iteration 131, loss = 0.32039828 Iteration 132, loss = 0.32190873 Iteration 133, loss = 0.32065445 Iteration 134, loss = 0.31858303 Iteration 135, loss = 0.31866783 Iteration 136, loss = 0.31828302 Iteration 137, loss = 0.31697869 Iteration 138, loss = 0.31679799 Iteration 139, loss = 0.31827800 Iteration 140, loss = 0.31989161 Iteration 141, loss = 0.31747781 Iteration 142, loss = 0.31538563 Iteration 143, loss = 0.31613712 Iteration 144, loss = 0.31546212 Iteration 145, loss = 0.31552405 Iteration 146, loss = 0.31379244 Iteration 147, loss = 0.31323599 Iteration 148, loss = 0.31290514 Iteration 149, loss = 0.31222230 Iteration 150, loss = 0.31155338 Iteration 151, loss = 0.31300248 Iteration 152, loss = 0.31246412 Iteration 153, loss = 0.31037383 Iteration 154, loss = 0.31137879 Iteration 155, loss = 0.31088758 Iteration 156, loss = 0.31029340 Iteration 157, loss = 0.30962448 Iteration 158, loss = 0.30970890 Iteration 159, loss = 0.30920635 Iteration 160, loss = 0.30907650 Iteration 161, loss = 0.31018020 Iteration 162, loss = 0.30835131 Iteration 163, loss = 0.31061860 Iteration 164, loss = 0.30841552 Iteration 165, loss = 0.30704502 Iteration 166, loss = 0.30598669 Iteration 167, loss = 0.30816417 Iteration 168, loss = 0.30784747 Iteration 169, loss = 0.30650586 Iteration 170, loss = 0.30590892 Iteration 171, loss = 0.30678290 Iteration 172, loss = 0.30755460 Iteration 173, loss = 0.30453693 Iteration 174, loss = 0.30744193 Iteration 175, loss = 0.30408342 Iteration 176, loss = 0.30313162 Iteration 177, loss = 0.30606056 Iteration 178, loss = 0.30379986 Iteration 179, loss = 0.30251754 Iteration 180, loss = 0.30414102 Iteration 181, loss = 0.30300434 Iteration 182, loss = 0.30211115 Iteration 183, loss = 0.30093296 Iteration 184, loss = 0.30044960 Iteration 185, loss = 
0.30190833 Iteration 186, loss = 0.30051653 Iteration 187, loss = 0.30062758 Iteration 188, loss = 0.30096836 Iteration 189, loss = 0.30049225 Iteration 190, loss = 0.30119272 Iteration 191, loss = 0.29927779 Iteration 192, loss = 0.29914693 Iteration 193, loss = 0.29849585 Iteration 194, loss = 0.29933143 Iteration 195, loss = 0.30334656 Iteration 196, loss = 0.29752042 Iteration 197, loss = 0.29830018 Iteration 198, loss = 0.29809616 Iteration 199, loss = 0.29876047 Iteration 200, loss = 0.29802400 Iteration 201, loss = 0.30112385 Iteration 202, loss = 0.29631260 Iteration 203, loss = 0.29618653 Iteration 204, loss = 0.29692855 Iteration 205, loss = 0.29558423 Iteration 206, loss = 0.29508700 Iteration 207, loss = 0.29518416 Iteration 208, loss = 0.29502758 Iteration 209, loss = 0.29584615 Iteration 210, loss = 0.29589851 Iteration 211, loss = 0.29538004 Iteration 212, loss = 0.29961526 Iteration 213, loss = 0.29415483 Iteration 214, loss = 0.29317311 Iteration 215, loss = 0.29367050 Iteration 216, loss = 0.29364078 Iteration 217, loss = 0.29460384 Iteration 218, loss = 0.29221298 Iteration 219, loss = 0.29445446 Iteration 220, loss = 0.29210504 Iteration 221, loss = 0.29262084 Iteration 222, loss = 0.29284936 Iteration 223, loss = 0.29281869 Iteration 224, loss = 0.29178851 Iteration 225, loss = 0.29059993 Iteration 226, loss = 0.29266405 Iteration 227, loss = 0.29078190 Iteration 228, loss = 0.29111649 Iteration 229, loss = 0.28946811 Iteration 230, loss = 0.28997903 Iteration 231, loss = 0.28915982 Iteration 232, loss = 0.28966901 Iteration 233, loss = 0.28976158 Iteration 234, loss = 0.29073853 Iteration 235, loss = 0.28947540 Iteration 236, loss = 0.29015194 Iteration 237, loss = 0.28995726 Iteration 238, loss = 0.28869927 Iteration 239, loss = 0.28775450 Iteration 240, loss = 0.28873529 Iteration 241, loss = 0.28769609 Iteration 242, loss = 0.28726499 Iteration 243, loss = 0.28664944 Iteration 244, loss = 0.28801823 Iteration 245, loss = 0.28707521 
Iteration 246, loss = 0.28594788 Iteration 247, loss = 0.28650747 Iteration 248, loss = 0.28754959 Iteration 249, loss = 0.28627193 Iteration 250, loss = 0.28816995 Iteration 251, loss = 0.28789637 Iteration 252, loss = 0.28674549 Iteration 253, loss = 0.28629648 Iteration 254, loss = 0.28708476 Iteration 255, loss = 0.28655632 Iteration 256, loss = 0.28695280 Iteration 257, loss = 0.28545704 Iteration 258, loss = 0.28593444 Iteration 259, loss = 0.28584157 Iteration 260, loss = 0.28408254 Iteration 261, loss = 0.28766354 Iteration 262, loss = 0.28293393 Iteration 263, loss = 0.28287330 Iteration 264, loss = 0.28237934 Iteration 265, loss = 0.28233598 Iteration 266, loss = 0.28425460 Iteration 267, loss = 0.28290535 Iteration 268, loss = 0.28178322 Iteration 269, loss = 0.28374227 Iteration 270, loss = 0.28404675 Iteration 271, loss = 0.28224440 Iteration 272, loss = 0.28078677 Iteration 273, loss = 0.28197992 Iteration 274, loss = 0.28228120 Iteration 275, loss = 0.28008022 Iteration 276, loss = 0.28361934 Iteration 277, loss = 0.28075823 Iteration 278, loss = 0.28066251 Iteration 279, loss = 0.28055708 Iteration 280, loss = 0.27907189 Iteration 281, loss = 0.28006013 Iteration 282, loss = 0.27889863 Iteration 283, loss = 0.28014281 Iteration 284, loss = 0.28197810 Iteration 285, loss = 0.27809658 Iteration 286, loss = 0.27752259 Iteration 287, loss = 0.27964906 Iteration 288, loss = 0.28130973 Iteration 289, loss = 0.27853650 Iteration 290, loss = 0.27803913 Iteration 291, loss = 0.27975718 Iteration 292, loss = 0.28142729 Iteration 293, loss = 0.27764616 Iteration 294, loss = 0.27778564 Iteration 295, loss = 0.27669177 Iteration 296, loss = 0.27806292 Iteration 297, loss = 0.27856005 Iteration 298, loss = 0.27718520 Iteration 299, loss = 0.27590785 Iteration 300, loss = 0.27857028 Iteration 301, loss = 0.27628494 Iteration 302, loss = 0.27664932 Iteration 303, loss = 0.27683171 Iteration 304, loss = 0.27840586 Iteration 305, loss = 0.27578133 Iteration 306, loss 
= 0.27868569 Iteration 307, loss = 0.27512773 Iteration 308, loss = 0.27466956 Iteration 309, loss = 0.27469481 Iteration 310, loss = 0.27381420 Iteration 311, loss = 0.27409125 Iteration 312, loss = 0.27561841 Iteration 313, loss = 0.27355865 Iteration 314, loss = 0.27442289 Iteration 315, loss = 0.27521134 Iteration 316, loss = 0.27289082 Iteration 317, loss = 0.27489882 Iteration 318, loss = 0.27415604 Iteration 319, loss = 0.27419641 Iteration 320, loss = 0.27263554 Iteration 321, loss = 0.27407273 Iteration 322, loss = 0.27205621 Iteration 323, loss = 0.27256592 Iteration 324, loss = 0.27354171 Iteration 325, loss = 0.27379613 Iteration 326, loss = 0.27315305 Iteration 327, loss = 0.27254657 Iteration 328, loss = 0.27135290 Iteration 329, loss = 0.27357834 Iteration 330, loss = 0.27294523 Iteration 331, loss = 0.27643225 Iteration 332, loss = 0.27049574 Iteration 333, loss = 0.27275791 Iteration 334, loss = 0.27132834 Iteration 335, loss = 0.27061755 Iteration 336, loss = 0.27005250 Iteration 337, loss = 0.26963795 Iteration 338, loss = 0.27349298 Iteration 339, loss = 0.26990577 Iteration 340, loss = 0.26949158 Iteration 341, loss = 0.26912918 Iteration 342, loss = 0.27155276 Iteration 343, loss = 0.27057566 Iteration 344, loss = 0.26870079 Iteration 345, loss = 0.26894737 Iteration 346, loss = 0.27174656 Iteration 347, loss = 0.27109037 Iteration 348, loss = 0.26901298 Iteration 349, loss = 0.26789396 Iteration 350, loss = 0.27112682 Iteration 351, loss = 0.26963600 Iteration 352, loss = 0.26760224 Iteration 353, loss = 0.26715203 Iteration 354, loss = 0.26765620 Iteration 355, loss = 0.26675996 Iteration 356, loss = 0.26725526 Iteration 357, loss = 0.26638038 Iteration 358, loss = 0.26636345 Iteration 359, loss = 0.26795113 Iteration 360, loss = 0.26778753 Iteration 361, loss = 0.26608292 Iteration 362, loss = 0.26550622 Iteration 363, loss = 0.26671366 Iteration 364, loss = 0.26582484 Iteration 365, loss = 0.26682817 Iteration 366, loss = 0.26481787 
Iteration 367, loss = 0.26749739 Iteration 368, loss = 0.26601408 Iteration 369, loss = 0.26568802 Iteration 370, loss = 0.26590366 Iteration 371, loss = 0.26548627 Iteration 372, loss = 0.26576595 Iteration 373, loss = 0.26560218 Iteration 374, loss = 0.26350080 Iteration 375, loss = 0.26744804 Iteration 376, loss = 0.26530305 Iteration 377, loss = 0.26615697 Iteration 378, loss = 0.26321827 Iteration 379, loss = 0.26423885 Iteration 380, loss = 0.26294360 Iteration 381, loss = 0.26414442 Iteration 382, loss = 0.26515077 Iteration 383, loss = 0.26810331 Iteration 384, loss = 0.26366263 Iteration 385, loss = 0.26164957 Iteration 386, loss = 0.26252536 Iteration 387, loss = 0.26336370 Iteration 388, loss = 0.26331210 Iteration 389, loss = 0.26601602 Iteration 390, loss = 0.26270712 Iteration 391, loss = 0.26330418 Iteration 392, loss = 0.26300323 Iteration 393, loss = 0.26375861 Iteration 394, loss = 0.26326015 Iteration 395, loss = 0.26464611 Iteration 396, loss = 0.26071683 Iteration 397, loss = 0.26020213 Iteration 398, loss = 0.26217376 Iteration 399, loss = 0.26317781 Iteration 400, loss = 0.26035023 Iteration 401, loss = 0.26199418 Iteration 402, loss = 0.26094176 Iteration 403, loss = 0.25975127 Iteration 404, loss = 0.26101667 Iteration 405, loss = 0.26060100 Iteration 406, loss = 0.26175467 Iteration 407, loss = 0.26020964 Iteration 408, loss = 0.26243017 Iteration 409, loss = 0.26014045 Iteration 410, loss = 0.26067422 Iteration 411, loss = 0.25999930 Iteration 412, loss = 0.25886525 Iteration 413, loss = 0.25829127 Iteration 414, loss = 0.25789233 Iteration 415, loss = 0.25874374 Iteration 416, loss = 0.25878686 Iteration 417, loss = 0.25834193 Iteration 418, loss = 0.25944346 Iteration 419, loss = 0.25805646 Iteration 420, loss = 0.25906836 Iteration 421, loss = 0.25727401 Iteration 422, loss = 0.25822978 Iteration 423, loss = 0.25942484 Iteration 424, loss = 0.25806960 Iteration 425, loss = 0.25825270 Iteration 426, loss = 0.25836303 Iteration 427, loss 
= 0.25781698 Iteration 428, loss = 0.25908743 Iteration 429, loss = 0.25697849 Iteration 430, loss = 0.25897334 Iteration 431, loss = 0.25708096 Iteration 432, loss = 0.25887287 Iteration 433, loss = 0.25709459 Iteration 434, loss = 0.25975052 Iteration 435, loss = 0.25912355 Iteration 436, loss = 0.25836193 Iteration 437, loss = 0.25694448 Iteration 438, loss = 0.25695460 Iteration 439, loss = 0.25740304 Iteration 440, loss = 0.25768498 Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.
# Evaluation of the MLP classifier: alias the predictions and ground truth
# under the generic names used by the metric calls below.
y_pred = predictions_g
y = y1_test

# Accuracy Score
accuracy_2_g = accuracy_score(y, y_pred)
print("\n\n======================")
print("Accuracy Score:")
print("======================\n\n")
print(" Accuracy: ", round(accuracy_2_g * 100, 2), "%")

# Classification Report: per-class precision / recall / F1 plus averages.
report = classification_report(y, y_pred)
print("\n\n======================")
print("Classification Report:")
print("======================\n\n")
print(report)

# Confusion Matrix (raw counts).
matrix = confusion_matrix(y, y_pred)
print("\n\n=================")
print("Confusion Matrix:")
print("=================\n\n")
print(matrix)

# Heat map of the row-normalised confusion matrix (each true class sums to 1).
print("\n\n===============================")
print("Confusion Matrix with Heat MAP:")
print("===============================\n\n")
confusion_matrix_heatmap = confusion_matrix(y, y_pred, normalize='true')
sns.set(rc={'figure.figsize': (10, 7)})
sns.heatmap(confusion_matrix_heatmap, annot=True)
plt.show()
======================
Accuracy Score:
======================
Accuracy: 88.78 %
======================
Classification Report:
======================
precision recall f1-score support
0 0.90 0.85 0.88 1351
1 0.88 0.92 0.90 1519
accuracy 0.89 2870
macro avg 0.89 0.89 0.89 2870
weighted avg 0.89 0.89 0.89 2870
=================
Confusion Matrix:
=================
[[1153 198]
[ 124 1395]]
===============================
Confusion Matrix with Heat MAP:
===============================
# Save Model
# Use a context manager so the file handle is closed (and buffers flushed)
# even if pickling raises; the original `pickle.dump(..., open(...))` left
# the handle dangling until garbage collection.
filename = 'Trained Models/model_2_g.sav'
with open(filename, 'wb') as model_file:
    pickle.dump(model_2_g, model_file)
# Train Machine Learning Model: a second scikit-learn MLP baseline with the
# same hyperparameters, this time fit on the second label set (y2_*;
# presumably the emotion task — verify upstream).
model_2_e = MLPClassifier(random_state=1, max_iter=1000, verbose=1)

# Fit Model on the shared feature matrix.
model_2_e.fit(X_train, y2_train)

# Get Predictions for the held-out test split.
predictions_e = model_2_e.predict(X_test)
Iteration 1, loss = 1.89671721 Iteration 2, loss = 1.82838932 Iteration 3, loss = 1.78508256 Iteration 4, loss = 1.75527074 Iteration 5, loss = 1.72921013 Iteration 6, loss = 1.70185430 Iteration 7, loss = 1.67743185 Iteration 8, loss = 1.65244562 Iteration 9, loss = 1.63125755 Iteration 10, loss = 1.61144297 Iteration 11, loss = 1.59523934 Iteration 12, loss = 1.58047752 Iteration 13, loss = 1.56822299 Iteration 14, loss = 1.55848348 Iteration 15, loss = 1.54864857 Iteration 16, loss = 1.54018300 Iteration 17, loss = 1.53298682 Iteration 18, loss = 1.52643167 Iteration 19, loss = 1.52011458 Iteration 20, loss = 1.51800398 Iteration 21, loss = 1.51000672 Iteration 22, loss = 1.50717873 Iteration 23, loss = 1.50069036 Iteration 24, loss = 1.49606655 Iteration 25, loss = 1.49647367 Iteration 26, loss = 1.48886166 Iteration 27, loss = 1.48546604 Iteration 28, loss = 1.48303516 Iteration 29, loss = 1.48002390 Iteration 30, loss = 1.47618641 Iteration 31, loss = 1.47267729 Iteration 32, loss = 1.47028993 Iteration 33, loss = 1.46694007 Iteration 34, loss = 1.46322304 Iteration 35, loss = 1.46063423 Iteration 36, loss = 1.45911075 Iteration 37, loss = 1.45399962 Iteration 38, loss = 1.45305205 Iteration 39, loss = 1.45004669 Iteration 40, loss = 1.44761376 Iteration 41, loss = 1.44454316 Iteration 42, loss = 1.44346255 Iteration 43, loss = 1.44257192 Iteration 44, loss = 1.43913709 Iteration 45, loss = 1.43582926 Iteration 46, loss = 1.43284349 Iteration 47, loss = 1.42986401 Iteration 48, loss = 1.42668529 Iteration 49, loss = 1.42477340 Iteration 50, loss = 1.42323895 Iteration 51, loss = 1.42032407 Iteration 52, loss = 1.41895959 Iteration 53, loss = 1.41494713 Iteration 54, loss = 1.41320794 Iteration 55, loss = 1.41230107 Iteration 56, loss = 1.40918947 Iteration 57, loss = 1.40775548 Iteration 58, loss = 1.40542308 Iteration 59, loss = 1.40251485 Iteration 60, loss = 1.40228089 Iteration 61, loss = 1.39843765 Iteration 62, loss = 1.39708621 Iteration 63, loss = 
1.39548203 Iteration 64, loss = 1.39321323 Iteration 65, loss = 1.39061955 Iteration 66, loss = 1.38950235 Iteration 67, loss = 1.38857762 Iteration 68, loss = 1.38512346 Iteration 69, loss = 1.38396016 Iteration 70, loss = 1.38282129 Iteration 71, loss = 1.37935535 Iteration 72, loss = 1.37955418 Iteration 73, loss = 1.37658566 Iteration 74, loss = 1.37484983 Iteration 75, loss = 1.37424588 Iteration 76, loss = 1.37242719 Iteration 77, loss = 1.37039119 Iteration 78, loss = 1.37044657 Iteration 79, loss = 1.36756660 Iteration 80, loss = 1.36473694 Iteration 81, loss = 1.36375729 Iteration 82, loss = 1.36319128 Iteration 83, loss = 1.36019651 Iteration 84, loss = 1.35952939 Iteration 85, loss = 1.35832857 Iteration 86, loss = 1.35693654 Iteration 87, loss = 1.35521279 Iteration 88, loss = 1.35426675 Iteration 89, loss = 1.35198499 Iteration 90, loss = 1.34981358 Iteration 91, loss = 1.35036229 Iteration 92, loss = 1.34821539 Iteration 93, loss = 1.34615229 Iteration 94, loss = 1.34553660 Iteration 95, loss = 1.34549412 Iteration 96, loss = 1.34294746 Iteration 97, loss = 1.34105041 Iteration 98, loss = 1.34148740 Iteration 99, loss = 1.34118509 Iteration 100, loss = 1.33722656 Iteration 101, loss = 1.33685988 Iteration 102, loss = 1.33456069 Iteration 103, loss = 1.33544055 Iteration 104, loss = 1.33440820 Iteration 105, loss = 1.33292274 Iteration 106, loss = 1.33090809 Iteration 107, loss = 1.32829254 Iteration 108, loss = 1.32818936 Iteration 109, loss = 1.32948254 Iteration 110, loss = 1.32550642 Iteration 111, loss = 1.32501808 Iteration 112, loss = 1.32224384 Iteration 113, loss = 1.32283543 Iteration 114, loss = 1.32162490 Iteration 115, loss = 1.32090983 Iteration 116, loss = 1.31902674 Iteration 117, loss = 1.31914771 Iteration 118, loss = 1.31821048 Iteration 119, loss = 1.31561116 Iteration 120, loss = 1.31371716 Iteration 121, loss = 1.31344684 Iteration 122, loss = 1.31173053 Iteration 123, loss = 1.31470053 Iteration 124, loss = 1.31014877 Iteration 
125, loss = 1.30896639 Iteration 126, loss = 1.30763816 Iteration 127, loss = 1.30618150 Iteration 128, loss = 1.30547256 Iteration 129, loss = 1.30487162 Iteration 130, loss = 1.30490110 Iteration 131, loss = 1.30708458 Iteration 132, loss = 1.30204495 Iteration 133, loss = 1.30211640 Iteration 134, loss = 1.30052343 Iteration 135, loss = 1.29888110 Iteration 136, loss = 1.29816681 Iteration 137, loss = 1.29788588 Iteration 138, loss = 1.29495660 Iteration 139, loss = 1.29685507 Iteration 140, loss = 1.29497007 Iteration 141, loss = 1.29410158 Iteration 142, loss = 1.29256294 Iteration 143, loss = 1.29029620 Iteration 144, loss = 1.29174241 Iteration 145, loss = 1.28900096 Iteration 146, loss = 1.28868452 Iteration 147, loss = 1.28887653 Iteration 148, loss = 1.28759969 Iteration 149, loss = 1.28711566 Iteration 150, loss = 1.28528970 Iteration 151, loss = 1.28510253 Iteration 152, loss = 1.28503910 Iteration 153, loss = 1.28246456 Iteration 154, loss = 1.28219867 Iteration 155, loss = 1.28261519 Iteration 156, loss = 1.27984702 Iteration 157, loss = 1.27879351 Iteration 158, loss = 1.27729531 Iteration 159, loss = 1.27641742 Iteration 160, loss = 1.27711746 Iteration 161, loss = 1.27461240 Iteration 162, loss = 1.27568434 Iteration 163, loss = 1.27381592 Iteration 164, loss = 1.27274832 Iteration 165, loss = 1.27106840 Iteration 166, loss = 1.27074565 Iteration 167, loss = 1.26905149 Iteration 168, loss = 1.26966710 Iteration 169, loss = 1.26964126 Iteration 170, loss = 1.26748163 Iteration 171, loss = 1.26664001 Iteration 172, loss = 1.26612448 Iteration 173, loss = 1.26519276 Iteration 174, loss = 1.26492219 Iteration 175, loss = 1.26496464 Iteration 176, loss = 1.26343738 Iteration 177, loss = 1.26123325 Iteration 178, loss = 1.26201987 Iteration 179, loss = 1.26003148 Iteration 180, loss = 1.26028838 Iteration 181, loss = 1.25938249 Iteration 182, loss = 1.25869495 Iteration 183, loss = 1.25803916 Iteration 184, loss = 1.25563281 Iteration 185, loss = 
1.25533125 Iteration 186, loss = 1.25364379 Iteration 187, loss = 1.25446697 Iteration 188, loss = 1.25628033 Iteration 189, loss = 1.25417560 Iteration 190, loss = 1.25236682 Iteration 191, loss = 1.25102829 Iteration 192, loss = 1.25140118 Iteration 193, loss = 1.25104118 Iteration 194, loss = 1.24837855 Iteration 195, loss = 1.24741031 Iteration 196, loss = 1.24668994 Iteration 197, loss = 1.24744446 Iteration 198, loss = 1.24514084 Iteration 199, loss = 1.24529432 Iteration 200, loss = 1.24327772 Iteration 201, loss = 1.24308059 Iteration 202, loss = 1.24420897 Iteration 203, loss = 1.24570915 Iteration 204, loss = 1.24147025 Iteration 205, loss = 1.24034912 Iteration 206, loss = 1.23955350 Iteration 207, loss = 1.23977819 Iteration 208, loss = 1.23981710 Iteration 209, loss = 1.23855034 Iteration 210, loss = 1.23804821 Iteration 211, loss = 1.23658957 Iteration 212, loss = 1.23453090 Iteration 213, loss = 1.23680966 Iteration 214, loss = 1.23584511 Iteration 215, loss = 1.23638338 Iteration 216, loss = 1.23498204 Iteration 217, loss = 1.23377566 Iteration 218, loss = 1.23169235 Iteration 219, loss = 1.23344910 Iteration 220, loss = 1.23035121 Iteration 221, loss = 1.23079831 Iteration 222, loss = 1.23082362 Iteration 223, loss = 1.22891373 Iteration 224, loss = 1.22926434 Iteration 225, loss = 1.22949552 Iteration 226, loss = 1.22758325 Iteration 227, loss = 1.22538895 Iteration 228, loss = 1.22941299 Iteration 229, loss = 1.22651917 Iteration 230, loss = 1.22539258 Iteration 231, loss = 1.22314115 Iteration 232, loss = 1.22292870 Iteration 233, loss = 1.22351939 Iteration 234, loss = 1.22284648 Iteration 235, loss = 1.22262918 Iteration 236, loss = 1.22216160 Iteration 237, loss = 1.22324234 Iteration 238, loss = 1.22013920 Iteration 239, loss = 1.22061406 Iteration 240, loss = 1.21929088 Iteration 241, loss = 1.21948013 Iteration 242, loss = 1.22000624 Iteration 243, loss = 1.21670278 Iteration 244, loss = 1.21713281 Iteration 245, loss = 1.21504858 
Iteration 246, loss = 1.21734345 Iteration 247, loss = 1.21489239 Iteration 248, loss = 1.21517598 Iteration 249, loss = 1.21425491 Iteration 250, loss = 1.21339304 Iteration 251, loss = 1.21436840 Iteration 252, loss = 1.21302564 Iteration 253, loss = 1.21135413 Iteration 254, loss = 1.21276011 Iteration 255, loss = 1.21338067 Iteration 256, loss = 1.21052378 Iteration 257, loss = 1.21075510 Iteration 258, loss = 1.21173812 Iteration 259, loss = 1.20920365 Iteration 260, loss = 1.20938667 Iteration 261, loss = 1.20929936 Iteration 262, loss = 1.20738228 Iteration 263, loss = 1.20778956 Iteration 264, loss = 1.20542536 Iteration 265, loss = 1.20692748 Iteration 266, loss = 1.20643339 Iteration 267, loss = 1.20652082 Iteration 268, loss = 1.20513933 Iteration 269, loss = 1.20577363 Iteration 270, loss = 1.20397248 Iteration 271, loss = 1.20358436 Iteration 272, loss = 1.20401157 Iteration 273, loss = 1.20230251 Iteration 274, loss = 1.20244600 Iteration 275, loss = 1.20130751 Iteration 276, loss = 1.20254379 Iteration 277, loss = 1.20100089 Iteration 278, loss = 1.20351812 Iteration 279, loss = 1.20060906 Iteration 280, loss = 1.19905263 Iteration 281, loss = 1.20058366 Iteration 282, loss = 1.19909515 Iteration 283, loss = 1.19860447 Iteration 284, loss = 1.19726370 Iteration 285, loss = 1.19705636 Iteration 286, loss = 1.19644142 Iteration 287, loss = 1.19672242 Iteration 288, loss = 1.19435106 Iteration 289, loss = 1.19680068 Iteration 290, loss = 1.19476065 Iteration 291, loss = 1.19549818 Iteration 292, loss = 1.19594950 Iteration 293, loss = 1.19483877 Iteration 294, loss = 1.19382530 Iteration 295, loss = 1.19439492 Iteration 296, loss = 1.19200205 Iteration 297, loss = 1.19310024 Iteration 298, loss = 1.19173921 Iteration 299, loss = 1.19211695 Iteration 300, loss = 1.18971646 Iteration 301, loss = 1.19088194 Iteration 302, loss = 1.18947628 Iteration 303, loss = 1.19148037 Iteration 304, loss = 1.19007254 Iteration 305, loss = 1.19153007 Iteration 306, loss 
= 1.18799699 Iteration 307, loss = 1.18947394 Iteration 308, loss = 1.18902251 Iteration 309, loss = 1.18783080 Iteration 310, loss = 1.18768340 Iteration 311, loss = 1.18656816 Iteration 312, loss = 1.18681740 Iteration 313, loss = 1.18776220 Iteration 314, loss = 1.18697444 Iteration 315, loss = 1.18544563 Iteration 316, loss = 1.18609323 Iteration 317, loss = 1.18366825 Iteration 318, loss = 1.18630678 Iteration 319, loss = 1.18462528 Iteration 320, loss = 1.18326025 Iteration 321, loss = 1.18456720 Iteration 322, loss = 1.18294184 Iteration 323, loss = 1.18196819 Iteration 324, loss = 1.18161248 Iteration 325, loss = 1.18000775 Iteration 326, loss = 1.18058386 Iteration 327, loss = 1.18091746 Iteration 328, loss = 1.18181320 Iteration 329, loss = 1.18072992 Iteration 330, loss = 1.18079072 Iteration 331, loss = 1.17990111 Iteration 332, loss = 1.17945230 Iteration 333, loss = 1.17853703 Iteration 334, loss = 1.17880061 Iteration 335, loss = 1.17924660 Iteration 336, loss = 1.17971007 Iteration 337, loss = 1.17682961 Iteration 338, loss = 1.17833590 Iteration 339, loss = 1.17647520 Iteration 340, loss = 1.17854574 Iteration 341, loss = 1.17833356 Iteration 342, loss = 1.17805308 Iteration 343, loss = 1.17687539 Iteration 344, loss = 1.17450695 Iteration 345, loss = 1.17722204 Iteration 346, loss = 1.17446999 Iteration 347, loss = 1.17535215 Iteration 348, loss = 1.17369212 Iteration 349, loss = 1.17507206 Iteration 350, loss = 1.17402604 Iteration 351, loss = 1.17338226 Iteration 352, loss = 1.17282142 Iteration 353, loss = 1.17239158 Iteration 354, loss = 1.17215671 Iteration 355, loss = 1.17240056 Iteration 356, loss = 1.17194764 Iteration 357, loss = 1.17127645 Iteration 358, loss = 1.17086732 Iteration 359, loss = 1.17065371 Iteration 360, loss = 1.17097531 Iteration 361, loss = 1.17127543 Iteration 362, loss = 1.16900342 Iteration 363, loss = 1.17016765 Iteration 364, loss = 1.16950145 Iteration 365, loss = 1.16819303 Iteration 366, loss = 1.16723722 
Iteration 367, loss = 1.17043376 Iteration 368, loss = 1.16764277 Iteration 369, loss = 1.16772055 Iteration 370, loss = 1.16673938 Iteration 371, loss = 1.16730686 Iteration 372, loss = 1.16765669 Iteration 373, loss = 1.16788726 Iteration 374, loss = 1.16742763 Iteration 375, loss = 1.16631057 Iteration 376, loss = 1.16558565 Iteration 377, loss = 1.16668735 Iteration 378, loss = 1.16599602 Iteration 379, loss = 1.16477776 Iteration 380, loss = 1.16487599 Iteration 381, loss = 1.16486490 Iteration 382, loss = 1.16600979 Iteration 383, loss = 1.16354682 Iteration 384, loss = 1.16413903 Iteration 385, loss = 1.16500065 Iteration 386, loss = 1.16251182 Iteration 387, loss = 1.16236578 Iteration 388, loss = 1.16306276 Iteration 389, loss = 1.16335447 Iteration 390, loss = 1.16343065 Iteration 391, loss = 1.16135950 Iteration 392, loss = 1.16055344 Iteration 393, loss = 1.16045893 Iteration 394, loss = 1.16192467 Iteration 395, loss = 1.16178712 Iteration 396, loss = 1.16086183 Iteration 397, loss = 1.16116990 Iteration 398, loss = 1.16051211 Iteration 399, loss = 1.16067194 Iteration 400, loss = 1.15969109 Iteration 401, loss = 1.16089418 Iteration 402, loss = 1.16022656 Iteration 403, loss = 1.15965108 Iteration 404, loss = 1.15777366 Iteration 405, loss = 1.15887256 Iteration 406, loss = 1.15741336 Iteration 407, loss = 1.15780886 Iteration 408, loss = 1.16033605 Iteration 409, loss = 1.15645941 Iteration 410, loss = 1.15603026 Iteration 411, loss = 1.15777953 Iteration 412, loss = 1.15767823 Iteration 413, loss = 1.15763950 Iteration 414, loss = 1.15570168 Iteration 415, loss = 1.15507134 Iteration 416, loss = 1.15556050 Iteration 417, loss = 1.15730942 Iteration 418, loss = 1.15589536 Iteration 419, loss = 1.15430426 Iteration 420, loss = 1.15440550 Iteration 421, loss = 1.15374721 Iteration 422, loss = 1.15364777 Iteration 423, loss = 1.15422351 Iteration 424, loss = 1.15500585 Iteration 425, loss = 1.15462079 Iteration 426, loss = 1.15455030 Iteration 427, loss 
= 1.15336067 Iteration 428, loss = 1.15392519 Iteration 429, loss = 1.15224338 Iteration 430, loss = 1.15344213 Iteration 431, loss = 1.15186469 Iteration 432, loss = 1.15211073 Iteration 433, loss = 1.15250235 Iteration 434, loss = 1.15139871 Iteration 435, loss = 1.15249208 Iteration 436, loss = 1.15112351 Iteration 437, loss = 1.15093657 Iteration 438, loss = 1.15148692 Iteration 439, loss = 1.15068045 Iteration 440, loss = 1.15116826 Iteration 441, loss = 1.15188020 Iteration 442, loss = 1.15185576 Iteration 443, loss = 1.15186159 Iteration 444, loss = 1.15220467 Iteration 445, loss = 1.14920980 Iteration 446, loss = 1.14897152 Iteration 447, loss = 1.14950987 Iteration 448, loss = 1.14819900 Iteration 449, loss = 1.14710512 Iteration 450, loss = 1.14932441 Iteration 451, loss = 1.14919183 Iteration 452, loss = 1.14794455 Iteration 453, loss = 1.15025981 Iteration 454, loss = 1.14920112 Iteration 455, loss = 1.14708757 Iteration 456, loss = 1.14758374 Iteration 457, loss = 1.14917563 Iteration 458, loss = 1.14721188 Iteration 459, loss = 1.14795628 Iteration 460, loss = 1.14918438 Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.
# --- Evaluate the MLP emotion classifier on the held-out test split ---
# Alias the stored predictions / labels so the metric calls below read cleanly.
y_pred = predictions_e
y = y2_test
# Overall accuracy of the MLP emotion model
accuracy_2_e = accuracy_score(y, y_pred)
# Banner + score (single print with newline separator, same output as before)
print("\n\n======================", "Accuracy Score:", "======================\n\n", sep="\n")
print(" Accuracy: ", round(accuracy_2_e * 100, 2), "%")
# Per-class precision/recall/F1 for the MLP emotion model
report = classification_report(y, y_pred)
print("\n\n======================", "Classification Report:", "======================\n\n", sep="\n")
print(report)
# Raw (count-based) confusion matrix
matrix = confusion_matrix(y, y_pred)
print("\n\n=================", "Confusion Matrix:", "=================\n\n", sep="\n")
print(matrix)
# Row-normalized confusion matrix rendered as a seaborn heat map
print("\n\n===============================", "Confusion Matrix with Heat MAP:", "===============================\n\n", sep="\n")
cm_normalized = confusion_matrix(y, y_pred, normalize='true')
sns.set(rc={'figure.figsize': (10, 7)})
sns.heatmap(cm_normalized, annot=True)
plt.show()
======================
Accuracy Score:
======================
Accuracy: 53.55 %
======================
Classification Report:
======================
precision recall f1-score support
0 0.62 0.69 0.65 425
1 0.43 0.27 0.33 396
2 0.52 0.26 0.35 413
3 0.46 0.40 0.43 434
4 0.44 0.61 0.51 392
5 0.58 0.59 0.58 406
6 0.62 0.94 0.75 404
accuracy 0.54 2870
macro avg 0.52 0.54 0.51 2870
weighted avg 0.52 0.54 0.51 2870
=================
Confusion Matrix:
=================
[[292 16 13 59 14 6 25]
[ 40 105 15 59 90 38 49]
[ 42 25 109 43 58 77 59]
[ 84 27 30 173 56 7 57]
[ 2 41 18 27 239 44 21]
[ 6 29 22 14 76 239 20]
[ 4 1 4 3 10 2 380]]
===============================
Confusion Matrix with Heat MAP:
===============================
# Save Model
# Persist the trained MLP emotion classifier to disk for later reuse.
filename = 'Trained Models/model_2_e.sav'
# Use a context manager so the file handle is always closed, even if
# pickle.dump raises (the original left the open() handle unclosed).
with open(filename, 'wb') as model_file:
    pickle.dump(model_2_e, model_file)
# Summarize the final accuracy of all four classifiers in a PrettyTable.
print("\nPerformance with Models")
print("=======================\n")
classifier_names = ["CNN (Gender)", "CNN (Emotion)", "MLP (Gender)", "MLP (Emotion)"]
final_scores = [
    history_1_g.history['accuracy'][-1],
    history_1_e.history['accuracy'][-1],
    accuracy_2_g,
    accuracy_2_e,
]
x = PrettyTable()
x.add_column("Classifier Name", classifier_names)
x.add_column("Accuracy Score", final_scores)
print(x)
# Horizontal bar chart comparing the final accuracy of all four classifiers.
x = ["CNN (Gender)", "CNN (Emotion)", "MLP (Gender)", "MLP (Emotion)"]
y = [
    round(history_1_g.history['accuracy'][-1], 2),
    round(history_1_e.history['accuracy'][-1], 2),
    round(accuracy_2_g, 2),
    round(accuracy_2_e, 2),
]
plt.rcParams["figure.figsize"] = (14, 5)
fig, ax = plt.subplots()
width = 0.25
ind = np.arange(len(y))  # one bar slot per classifier
ax.barh(ind, y, width, color="blue")
ax.set_yticks((ind + width / 2) - 0.1)
ax.set_yticklabels(x, minor=False)
# Annotate each bar with its score just past the bar's end
for i, v in enumerate(y):
    ax.text(v + 0.01, i, str(v), color='blue', fontweight='bold')
plt.title('Comparison')  # fixed typo: was 'Comparision'
plt.xlabel('Accuracy Score')
plt.ylabel('Classifiers')
plt.show()
Performance with Models ======================= +-----------------+--------------------+ | Classifier Name | Accuracy Score | +-----------------+--------------------+ | CNN (Gender) | 0.979529619216919 | | CNN (Emotion) | 0.7447735071182251 | | MLP (Gender) | 0.8878048780487805 | | MLP (Emotion) | 0.5355400696864111 | +-----------------+--------------------+